@@ -77,3 +77,20 @@ AWS_ACCESS_KEY="your aws access key"
 
 # Set AWS_SANDBOX to true if you're developing Huginn code.
 AWS_SANDBOX=false
+
+########################
+#   Various Settings   #
+########################
+
+# Allow JSONPath eval expresions. i.e., $..price[?(@ < 20)]
+# You should not allow this on a shared Huginn box because it is not secure.
+ALLOW_JSONPATH_EVAL=false
+
+# Enable this setting to allow insecure Agents like the ShellCommandAgent. Only do this
+# when you trust everyone using your Huginn installation.
+ENABLE_INSECURE_AGENTS=false
+
+# Use Graphviz for generating diagrams instead of using Google Chart
+# Tools. Specify a dot(1) command path built with SVG support
+# enabled.
+#USE_GRAPHVIZ_DOT=dot
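A rough illustration (not part of the diff) of what a JSONPath filter such as `$..price[?(@ < 20)]` selects, and why `ALLOW_JSONPATH_EVAL` defaults to false:

    # Illustrative sketch only. The filter predicate (@ < 20) is evaluated as code
    # against each candidate value, which is why eval expressions stay disabled on
    # shared installations.
    items = [{ 'price' => 12 }, { 'price' => 30 }, { 'price' => 8 }]
    items.map { |h| h['price'] }.select { |p| p < 20 }   # => [12, 8]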
@@ -7,6 +7,7 @@ capybara-*.html
 /vendor/bundle
 /log/*
 /tmp/*
+!/tmp/.gitkeep
 /db/*.sqlite3
 /public/system/*
 /coverage/
@@ -4,6 +4,8 @@ rvm:
   - 2.0.0
   - 2.1.1
   - 1.9.3
+before_install:
+  - travis_retry gem install bundler
 before_script:
   - mysql -e 'create database huginn_test;'
   - bundle exec rake db:migrate db:test:prepare
@@ -1,5 +1,7 @@
 # Changes
 
+* 0.5 (April 20, 2014) - Tons of new additions! FtpsiteAgent; WebsiteAgent has xpath, multiple URL, and encoding support; regexp extractions in EventFormattingAgent; PostAgent takes default params and headers, and can make GET requests; local Graphviz support; ShellCommandAgent; BasecampAgent; HipchatAgent; and lots of bug fixes!
+* 0.4 (April 10, 2014) - WebHooksController has been renamed to WebRequestsController and all HTTP verbs are now accepted and passed through to Agents' #receive\_web\_request method. The new DataOutputAgent returns JSON or RSS feeds of incoming Events via external web request. [Documentation is on the wiki.](https://github.com/cantino/huginn/wiki/Creating-a-new-agent#receiving-web-requests).
 * 0.31 (Jan 2, 2014) - Agents now have an optional keep\_events\_for option that is propagated to created events' expires\_at field, and they update their events' expires\_at fields on change.
 * 0.3 (Jan 1, 2014) - Remove symbolization of memory, options, and payloads; convert memory, options, and payloads to JSON from YAML. Migration will perform conversion and adjust tables to be UTF-8. Recommend making a DB backup before migrating.
 * 0.2 (Nov 6, 2013) - PeakDetectorAgent now uses `window_duration_in_days` and `min_peak_spacing_in_days`. Additionally, peaks trigger when the time series rises over the standard deviation multiple, not after it starts to fall.
@@ -1,63 +1,65 @@
 source 'https://rubygems.org'
 
-gem 'rails'
-gem 'rake'
-gem 'mysql2'
-gem 'devise'
-gem 'kaminari'
-gem 'bootstrap-kaminari-views'
-gem "rufus-scheduler", :require => false
+gem 'rails', '3.2.17'
+gem 'mysql2', '~> 0.3.13'
+gem 'devise', '~> 3.0.0'
+gem 'kaminari', '~> 0.14.1'
+gem 'bootstrap-kaminari-views', '~> 0.0.2'
+gem 'rufus-scheduler', '~> 3.0.7', require: false
 gem 'json', '>= 1.7.7'
-gem 'jsonpath'
-gem 'twilio-ruby'
-gem 'ruby-growl'
+gem 'jsonpath', '~> 0.5.3'
+gem 'twilio-ruby', '~> 3.10.0'
+gem 'ruby-growl', '~> 4.1.0'
 
-gem 'delayed_job'
-gem 'delayed_job_active_record'#, "~> 0.3.3" # newer was giving a strange MySQL error
-gem "daemons"
-# gem "delayed_job_web"
+gem 'delayed_job', '~> 4.0.0'
+gem 'delayed_job_active_record', '~> 4.0.0'
+gem 'daemons', '~> 1.1.9'
 
-gem 'foreman'
-gem 'dotenv-rails', :groups => [:development, :test]
+# To enable DelayedJobWeb, see the 'Enable DelayedJobWeb' section of the README.
+# gem 'delayed_job_web'
+
+gem 'foreman', '~> 0.63.0'
 
 gem 'sass-rails', '~> 3.2.3'
 gem 'coffee-rails', '~> 3.2.1'
 gem 'uglifier', '>= 1.0.3'
-gem 'select2-rails'
-gem 'jquery-rails'
-gem 'ace-rails-ap'
-
-gem 'geokit-rails3'
-gem 'kramdown'
-gem "typhoeus"
-gem 'nokogiri'
-gem 'wunderground'
-gem 'forecast_io'
-gem 'rturk'
-
-gem "twitter", '~> 5.7.1'
-gem 'twitter-stream', :git => 'https://github.com/cantino/twitter-stream', :branch => 'master'
-gem 'em-http-request'
-gem 'weibo_2'
-
-gem 'therubyracer'
-
-platforms :ruby_18 do
-  gem 'system_timer'
-  gem 'fastercsv'
-end
+gem 'select2-rails', '~> 3.4.3'
+gem 'jquery-rails', '~> 3.0.4'
+gem 'ace-rails-ap', '~> 2.0.1'
+
+# geokit-rails doesn't work with geokit 1.8.X but it specifies ~> 1.5
+# in its own Gemfile.
+gem 'geokit', '~> 1.6.7'
+gem 'geokit-rails3', '~> 0.1.5'
+
+gem 'kramdown', '~> 1.1.0'
+gem 'typhoeus', '~> 0.6.3'
+gem 'nokogiri', '~> 1.6.0'
+
+gem 'wunderground', '~> 1.1.0'
+gem 'forecast_io', '~> 2.0.0'
+gem 'rturk', '~> 2.11.0'
+
+gem 'twitter', '~> 5.7.1'
+gem 'twitter-stream', github: 'cantino/twitter-stream', branch: 'master'
+gem 'em-http-request', '~> 1.1.2'
+gem 'weibo_2', '~> 0.1.4'
+gem 'hipchat', '~> 1.1.0'
+
+gem 'therubyracer', '~> 0.12.1'
 
 group :development do
-  gem 'pry'
   gem 'binding_of_caller'
   gem 'better_errors'
 end
 
 group :development, :test do
+  gem 'dotenv-rails'
+  gem 'pry'
   gem 'rspec-rails'
   gem 'rspec'
   gem 'shoulda-matchers'
   gem 'rr'
-  gem 'webmock', :require => false
-  gem 'coveralls', :require => false
+  gem 'webmock', require: false
+  gem 'coveralls', require: false
 end
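For readers unfamiliar with the Bundler pessimistic operator that this Gemfile change leans on, a brief illustrative note (not part of the diff):

    # "~>" pins a gem to a compatible range: "~> 0.3.13" means >= 0.3.13 and < 0.4,
    # while "~> 3.0.7" means >= 3.0.7 and < 3.1. A bare version string is an exact pin.
    gem 'mysql2', '~> 0.3.13'   # 0.3.15 satisfies this, 0.4.0 does not
    gem 'rails', '3.2.17'       # only 3.2.17 satisfies this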
@@ -1,5 +1,5 @@
 GIT
-  remote: https://github.com/cantino/twitter-stream
+  remote: git://github.com/cantino/twitter-stream.git
   revision: fde6bed2b62ca487d49e4a57381bbfca6e33361b
   branch: master
   specs:
@@ -12,12 +12,12 @@ GEM
   remote: https://rubygems.org/
   specs:
     ace-rails-ap (2.0.1)
-    actionmailer (3.2.13)
-      actionpack (= 3.2.13)
-      mail (~> 2.5.3)
-    actionpack (3.2.13)
-      activemodel (= 3.2.13)
-      activesupport (= 3.2.13)
+    actionmailer (3.2.17)
+      actionpack (= 3.2.17)
+      mail (~> 2.5.4)
+    actionpack (3.2.17)
+      activemodel (= 3.2.17)
+      activesupport (= 3.2.17)
       builder (~> 3.0.0)
       erubis (~> 2.7.0)
       journey (~> 1.0.4)
@@ -25,68 +25,69 @@ GEM
       rack-cache (~> 1.2)
       rack-test (~> 0.6.1)
       sprockets (~> 2.2.1)
-    activemodel (3.2.13)
-      activesupport (= 3.2.13)
+    activemodel (3.2.17)
+      activesupport (= 3.2.17)
       builder (~> 3.0.0)
-    activerecord (3.2.13)
-      activemodel (= 3.2.13)
-      activesupport (= 3.2.13)
+    activerecord (3.2.17)
+      activemodel (= 3.2.17)
+      activesupport (= 3.2.17)
       arel (~> 3.0.2)
       tzinfo (~> 0.3.29)
-    activeresource (3.2.13)
-      activemodel (= 3.2.13)
-      activesupport (= 3.2.13)
-    activesupport (3.2.13)
-      i18n (= 0.6.1)
+    activeresource (3.2.17)
+      activemodel (= 3.2.17)
+      activesupport (= 3.2.17)
+    activesupport (3.2.17)
+      i18n (~> 0.6, >= 0.6.4)
       multi_json (~> 1.0)
-    addressable (2.3.5)
+    addressable (2.3.6)
     arel (3.0.3)
-    atomic (1.1.14)
-    bcrypt-ruby (3.1.1)
+    bcrypt (3.1.7)
+    bcrypt-ruby (3.1.5)
+      bcrypt (>= 3.1.3)
     better_errors (1.1.0)
       coderay (>= 1.0.0)
       erubis (>= 2.6.6)
     binding_of_caller (0.7.2)
       debug_inspector (>= 0.0.1)
-    bootstrap-kaminari-views (0.0.2)
+    bootstrap-kaminari-views (0.0.3)
       kaminari (>= 0.13)
       rails (>= 3.1)
     buftok (0.2.0)
     builder (3.0.4)
-    coderay (1.0.9)
+    coderay (1.1.0)
     coffee-rails (3.2.2)
       coffee-script (>= 2.2.0)
       railties (~> 3.2.0)
     coffee-script (2.2.0)
       coffee-script-source
       execjs
-    coffee-script-source (1.6.3)
-    cookiejar (0.3.1)
+    coffee-script-source (1.7.0)
+    cookiejar (0.3.2)
     coveralls (0.7.0)
       multi_json (~> 1.3)
       rest-client
       simplecov (>= 0.7)
       term-ansicolor
       thor
-    crack (0.4.1)
-      safe_yaml (~> 0.9.0)
+    crack (0.4.2)
+      safe_yaml (~> 1.0.0)
     daemons (1.1.9)
     debug_inspector (0.0.2)
-    delayed_job (4.0.0)
-      activesupport (>= 3.0, < 4.1)
-    delayed_job_active_record (4.0.0)
-      activerecord (>= 3.0, < 4.1)
+    delayed_job (4.0.1)
+      activesupport (>= 3.0, < 4.2)
+    delayed_job_active_record (4.0.1)
+      activerecord (>= 3.0, < 4.2)
       delayed_job (>= 3.0, < 4.1)
-    devise (3.0.0)
+    devise (3.0.4)
       bcrypt-ruby (~> 3.0)
       orm_adapter (~> 0.1)
       railties (>= 3.2.6, < 5)
       warden (~> 1.2.3)
-    diff-lcs (1.2.4)
-    docile (1.1.1)
-    dotenv (0.9.0)
-    dotenv-rails (0.9.0)
-      dotenv (= 0.9.0)
+    diff-lcs (1.2.5)
+    docile (1.1.3)
+    dotenv (0.10.0)
+    dotenv-rails (0.10.0)
+      dotenv (= 0.10.0)
     em-http-request (1.1.2)
       addressable (>= 2.3.4)
       cookiejar
@@ -96,19 +97,16 @@ GEM
     em-socksify (0.3.0)
       eventmachine (>= 1.0.0.beta.4)
     equalizer (0.0.9)
-    erector (0.9.0)
+    erector (0.10.0)
       treetop (>= 1.2.3)
     erubis (2.7.0)
-    ethon (0.5.12)
+    ethon (0.7.0)
       ffi (>= 1.3.0)
-      mime-types (~> 1.18)
     eventmachine (1.0.3)
-    execjs (1.4.0)
-      multi_json (~> 1.0)
+    execjs (2.0.2)
     faraday (0.9.0)
       multipart-post (>= 1.2, < 3)
-    fastercsv (1.5.5)
-    ffi (1.9.0)
+    ffi (1.9.3)
     forecast_io (2.0.0)
       faraday
       hashie
@@ -116,88 +114,88 @@ GEM
     foreman (0.63.0)
       dotenv (>= 0.7)
       thor (>= 0.13.6)
-    geokit (1.6.5)
-      multi_json
+    geokit (1.6.7)
+      multi_json (>= 1.3.2)
     geokit-rails3 (0.1.5)
       geokit (~> 1.5)
       rails (~> 3.0)
     hashie (2.0.5)
     hike (1.2.3)
+    hipchat (1.1.0)
+      httparty
     http (0.5.0)
       http_parser.rb
     http_parser.rb (0.6.0)
-    httparty (0.11.0)
-      multi_json (~> 1.0)
+    httparty (0.13.1)
+      json (~> 1.8)
       multi_xml (>= 0.5.2)
-    httpauth (0.2.0)
-    i18n (0.6.1)
+    i18n (0.6.9)
     journey (1.0.4)
     jquery-rails (3.0.4)
       railties (>= 3.0, < 5.0)
       thor (>= 0.14, < 2.0)
     json (1.8.1)
-    jsonpath (0.5.3)
+    jsonpath (0.5.6)
       multi_json
-    jwt (0.1.8)
+    jwt (0.1.11)
       multi_json (>= 1.5)
     kaminari (0.14.1)
       actionpack (>= 3.0.0)
       activesupport (>= 3.0.0)
     kramdown (1.1.0)
     libv8 (3.16.14.3)
-    macaddr (1.6.7)
+    macaddr (1.7.1)
       systemu (~> 2.6.2)
     mail (2.5.4)
       mime-types (~> 1.16)
       treetop (~> 1.4.8)
-    memoizable (0.4.0)
-      thread_safe (~> 0.1.3)
-    method_source (0.8.1)
-    mime-types (1.24)
-    mini_portile (0.5.1)
-    multi_json (1.7.9)
+    memoizable (0.4.2)
+      thread_safe (~> 0.3, >= 0.3.1)
+    method_source (0.8.2)
+    mime-types (1.25.1)
+    mini_portile (0.5.3)
+    multi_json (1.9.2)
     multi_xml (0.5.5)
     multipart-post (2.0.0)
-    mysql2 (0.3.13)
+    mysql2 (0.3.15)
     naught (1.0.0)
-    nokogiri (1.6.0)
+    nokogiri (1.6.1)
       mini_portile (~> 0.5.0)
-    oauth2 (0.9.2)
-      faraday (~> 0.8)
-      httpauth (~> 0.2)
-      jwt (~> 0.1.4)
-      multi_json (~> 1.0)
+    oauth2 (0.9.3)
+      faraday (>= 0.8, < 0.10)
+      jwt (~> 0.1.8)
+      multi_json (~> 1.3)
       multi_xml (~> 0.5)
       rack (~> 1.2)
-    orm_adapter (0.4.0)
-    polyglot (0.3.3)
-    pry (0.9.12.2)
-      coderay (~> 1.0.5)
+    orm_adapter (0.5.0)
+    polyglot (0.3.4)
+    pry (0.9.12.6)
+      coderay (~> 1.0)
       method_source (~> 0.8)
       slop (~> 3.4)
     rack (1.4.5)
     rack-cache (1.2)
       rack (>= 0.4)
-    rack-ssl (1.3.3)
+    rack-ssl (1.3.4)
       rack
     rack-test (0.6.2)
       rack (>= 1.0)
-    rails (3.2.13)
-      actionmailer (= 3.2.13)
-      actionpack (= 3.2.13)
-      activerecord (= 3.2.13)
-      activeresource (= 3.2.13)
-      activesupport (= 3.2.13)
+    rails (3.2.17)
+      actionmailer (= 3.2.17)
+      actionpack (= 3.2.17)
+      activerecord (= 3.2.17)
+      activeresource (= 3.2.17)
+      activesupport (= 3.2.17)
       bundler (~> 1.0)
-      railties (= 3.2.13)
-    railties (3.2.13)
-      actionpack (= 3.2.13)
-      activesupport (= 3.2.13)
+      railties (= 3.2.17)
+    railties (3.2.17)
+      actionpack (= 3.2.17)
+      activesupport (= 3.2.17)
       rack-ssl (~> 1.3.2)
       rake (>= 0.8.7)
       rdoc (~> 3.4)
       thor (>= 0.14.6, < 2.0)
-    rake (10.1.0)
+    rake (10.2.2)
     rdoc (3.12.2)
       json (~> 1.4)
     ref (1.0.5)
@@ -208,35 +206,36 @@ GEM
       rspec-core (~> 2.14.0)
       rspec-expectations (~> 2.14.0)
       rspec-mocks (~> 2.14.0)
-    rspec-core (2.14.5)
-    rspec-expectations (2.14.2)
+    rspec-core (2.14.8)
+    rspec-expectations (2.14.5)
       diff-lcs (>= 1.1.3, < 2.0)
-    rspec-mocks (2.14.3)
-    rspec-rails (2.14.0)
+    rspec-mocks (2.14.6)
+    rspec-rails (2.14.2)
       actionpack (>= 3.0)
+      activemodel (>= 3.0)
       activesupport (>= 3.0)
       railties (>= 3.0)
       rspec-core (~> 2.14.0)
       rspec-expectations (~> 2.14.0)
       rspec-mocks (~> 2.14.0)
-    rturk (2.11.0)
+    rturk (2.11.3)
       erector
       nokogiri
       rest-client
     ruby-growl (4.1)
       uuid (~> 2.3, >= 2.3.5)
-    rufus-scheduler (3.0.2)
+    rufus-scheduler (3.0.7)
       tzinfo
-    safe_yaml (0.9.5)
-    sass (3.2.9)
+    safe_yaml (1.0.2)
+    sass (3.3.5)
     sass-rails (3.2.6)
       railties (~> 3.2.0)
       sass (>= 3.1.10)
       tilt (~> 1.3)
-    select2-rails (3.4.3)
+    select2-rails (3.4.9)
       sass-rails
       thor (~> 0.14)
-    shoulda-matchers (2.2.0)
+    shoulda-matchers (2.6.0)
       activesupport (>= 3.0.0)
     simple_oauth (0.2.0)
     simplecov (0.8.2)
@@ -244,28 +243,26 @@ GEM
       multi_json
       simplecov-html (~> 0.8.0)
     simplecov-html (0.8.0)
-    slop (3.4.5)
+    slop (3.5.0)
     sprockets (2.2.2)
       hike (~> 1.2)
       multi_json (~> 1.0)
       rack (~> 1.0)
       tilt (~> 1.1, != 1.3.0)
-    system_timer (1.2.4)
     systemu (2.6.4)
-    term-ansicolor (1.2.2)
-      tins (~> 0.8)
-    therubyracer (0.12.0)
+    term-ansicolor (1.3.0)
+      tins (~> 1.0)
+    therubyracer (0.12.1)
       libv8 (~> 3.16.14.0)
       ref
-    thor (0.18.1)
-    thread_safe (0.1.3)
-      atomic
+    thor (0.19.1)
+    thread_safe (0.3.3)
     tilt (1.4.1)
-    tins (0.13.1)
+    tins (1.1.0)
     treetop (1.4.15)
       polyglot
       polyglot (>= 0.3.1)
-    twilio-ruby (3.10.0)
+    twilio-ruby (3.10.1)
       builder (>= 2.1.2)
       jwt (>= 0.1.2)
       multi_json (>= 1.3.0)
@@ -280,12 +277,12 @@ GEM
       memoizable (~> 0.4.0)
       naught (~> 1.0)
       simple_oauth (~> 0.2.0)
-    typhoeus (0.6.3)
-      ethon (~> 0.5.11)
-    tzinfo (0.3.38)
-    uglifier (2.1.2)
+    typhoeus (0.6.8)
+      ethon (>= 0.7.0)
+    tzinfo (0.3.39)
+    uglifier (2.5.0)
       execjs (>= 0.3.0)
-      multi_json (~> 1.0, >= 1.0.2)
+      json (>= 1.8.0)
     uuid (2.3.7)
       macaddr (~> 1.0)
     warden (1.2.3)
@@ -293,9 +290,9 @@ GEM
     webmock (1.13.0)
       addressable (>= 2.2.7)
       crack (>= 0.3.2)
-    weibo_2 (0.1.4)
+    weibo_2 (0.1.6)
       hashie (~> 2.0.4)
-      multi_json (~> 1.7.2)
+      multi_json (~> 1)
       oauth2 (~> 0.9.1)
       rest-client (~> 1.6.7)
     wunderground (1.1.0)
@@ -307,48 +304,47 @@ PLATFORMS
   ruby
 
 DEPENDENCIES
-  ace-rails-ap
+  ace-rails-ap (~> 2.0.1)
   better_errors
   binding_of_caller
-  bootstrap-kaminari-views
+  bootstrap-kaminari-views (~> 0.0.2)
   coffee-rails (~> 3.2.1)
   coveralls
-  daemons
-  delayed_job
-  delayed_job_active_record
-  devise
+  daemons (~> 1.1.9)
+  delayed_job (~> 4.0.0)
+  delayed_job_active_record (~> 4.0.0)
+  devise (~> 3.0.0)
   dotenv-rails
-  em-http-request
-  fastercsv
-  forecast_io
-  foreman
-  geokit-rails3
-  jquery-rails
+  em-http-request (~> 1.1.2)
+  forecast_io (~> 2.0.0)
+  foreman (~> 0.63.0)
+  geokit (~> 1.6.7)
+  geokit-rails3 (~> 0.1.5)
+  hipchat (~> 1.1.0)
+  jquery-rails (~> 3.0.4)
   json (>= 1.7.7)
-  jsonpath
-  kaminari
-  kramdown
-  mysql2
-  nokogiri
+  jsonpath (~> 0.5.3)
+  kaminari (~> 0.14.1)
+  kramdown (~> 1.1.0)
+  mysql2 (~> 0.3.13)
+  nokogiri (~> 1.6.0)
   pry
-  rails
-  rake
+  rails (= 3.2.17)
   rr
   rspec
   rspec-rails
-  rturk
-  ruby-growl
-  rufus-scheduler
+  rturk (~> 2.11.0)
+  ruby-growl (~> 4.1.0)
+  rufus-scheduler (~> 3.0.7)
   sass-rails (~> 3.2.3)
-  select2-rails
+  select2-rails (~> 3.4.3)
   shoulda-matchers
-  system_timer
-  therubyracer
-  twilio-ruby
+  therubyracer (~> 0.12.1)
+  twilio-ruby (~> 3.10.0)
   twitter (~> 5.7.1)
   twitter-stream!
-  typhoeus
+  typhoeus (~> 0.6.3)
   uglifier (>= 1.0.3)
   webmock
-  weibo_2
-  wunderground
+  weibo_2 (~> 0.1.4)
+  wunderground (~> 1.1.0)
@@ -8,4 +8,4 @@ dj: bundle exec script/delayed_job run
 # web: bundle exec unicorn -c config/unicorn/production.rb
 # schedule: bundle exec rails runner bin/schedule.rb
 # twitter: bundle exec rails runner bin/twitter_stream.rb
-# dj: bundle exec script/delayed_job run
+# dj: bundle exec script/delayed_job run
@@ -24,7 +24,7 @@ Follow [@tectonic](https://twitter.com/tectonic) for updates as Huginn evolves,
 
 ### We need your help!
 
-Want to help with Huginn? Try tackling [issues tagged with #help-wanted](https://github.com/cantino/huginn/issues?direction=desc&labels=help-wanted&page=1&sort=created&state=open).
+Want to help with Huginn? All contributions are encouraged! You could make UI improvements, add new Agents, write documentation and tutorials, or try tackling [issues tagged with #help-wanted](https://github.com/cantino/huginn/issues?direction=desc&labels=help-wanted&page=1&sort=created&state=open).
 
 ## Examples
 
@@ -49,44 +49,29 @@ And now, some example screenshots. Below them are instructions to get you started.
 If you just want to play around, you can simply clone this repository, then perform the following steps:
 
 * Copy `.env.example` to `.env` (`cp .env.example .env`) and edit `.env`, at least updating the `APP_SECRET_TOKEN` variable.
-* Run `rake db:create`, `rake db:migrate`, and then `rake db:seed` to create a development MySQL database with some example seed data.
+* Run `rake db:create`, `rake db:migrate`, and then `rake db:seed` to create a development MySQL database with some example Agents.
 * Run `foreman start`, visit [http://localhost:3000/][localhost], and login with the username of `admin` and the password of `password`.
 * Setup some Agents!
+* Read the [wiki][wiki] for usage examples and to get started making new Agents.
 
 Note: by default, emails are not sent in the `development` Rails environment, which is what you just setup. If you'd like to enable emails when playing with Huginn locally, edit `config.action_mailer.perform_deliveries` in `config/environments/development.rb`.
 
 If you need more detailed instructions, see the [Novice setup guide][novice-setup-guide].
 
 [localhost]: http://localhost:3000/
+[wiki]: https://github.com/cantino/huginn/wiki
 [novice-setup-guide]: https://github.com/cantino/huginn/wiki/Novice-setup-guide
 
-### Real Start
-
-Follow these instructions if you wish to deploy your own version of Huginn or contribute back to the project. Github doesn't make it easy to work with private forks of public repositories, so I recommend that you follow the following steps:
-
-* Make a public fork of Huginn. If you can't create private Github repositories, you can skip the steps below. Just follow the *Quick Start* steps above and make pull requests when you want to contribute a patch.
-* Make a private, empty Github repository called `huginn-private`
-* Duplicate your public fork into your new private repository (via [Github's instructions](https://help.github.com/articles/duplicating-a-repository)):
-
-    git clone --bare git@github.com:you/huginn.git
-    cd huginn.git
-    git push --mirror git@github.com:you/huginn-private.git
-    cd .. && rm -rf huginn.git
-
-* Checkout your new private repository.
-* Add your Huginn public fork as a remote to your new private repository (`huginn-private`):
-
-    git remote add public git@github.com:you/huginn.git
-
-* Run the steps from *Quick Start* above to configure your copy of Huginn.
-* When you want to contribute patches, do a remote push from your private repository to your public fork of the relevant commits, then make a pull request to this repository.
-
 ## Deployment
 
 Please see [the Huginn Wiki](https://github.com/cantino/huginn/wiki#deploying-huginn) for detailed deployment strategies for different providers.
 
 ### Optional Setup
 
+#### Setup for private development
+
+See [private development instructions](https://github.com/cantino/huginn/wiki/Private-development-instructions) on the wiki.
+
 #### Enable the WeatherAgent
 
 In order to use the WeatherAgent you need an [API key with Wunderground](http://www.wunderground.com/weather/api/). Signup for one and then change value of `api_key: your-key` in your seeded WeatherAgent.
@@ -0,0 +1,41 @@
+# This controller is designed to allow your Agents to receive cross-site Webhooks (POSTs), or to output data streams.
+# When a POST or GET is received, your Agent will have #receive_web_request called on itself with the incoming params,
+# method, and requested content-type.
+#
+# Requests are routed as follows:
+#   http://yourserver.com/users/:user_id/web_requests/:agent_id/:secret
+# where :user_id is a User's id, :agent_id is an Agent's id, and :secret is a token that should be user-specifiable in
+# an Agent that implements #receive_web_request. It is highly recommended that every Agent verify this token whenever
+# #receive_web_request is called. For example, one of your Agent's options could be :secret and you could compare this
+# value to params[:secret] whenever #receive_web_request is called on your Agent, rejecting invalid requests.
+#
+# Your Agent's #receive_web_request method should return an Array of json_or_string_response, status_code, and
+# optional mime type. For example:
+#   [{status: "success"}, 200]
+# or
+#   ["not found", 404, 'text/plain']
+
+class WebRequestsController < ApplicationController
+  skip_before_filter :authenticate_user!
+
+  def handle_request
+    user = User.find_by_id(params[:user_id])
+    if user
+      agent = user.agents.find_by_id(params[:agent_id])
+      if agent
+        content, status, content_type = agent.trigger_web_request(params.except(:action, :controller, :agent_id, :user_id, :format), request.method_symbol.to_s, request.format.to_s)
+        if content.is_a?(String)
+          render :text => content, :status => status || 200, :content_type => content_type || 'text/plain'
+        elsif content.is_a?(Hash)
+          render :json => content, :status => status || 200
+        else
+          head(status || 200)
+        end
+      else
+        render :text => "agent not found", :status => 404
+      end
+    else
+      render :text => "user not found", :status => 404
+    end
+  end
+end
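As a rough, hypothetical sketch (not part of the commit), an Agent following the contract described in the controller's header comment might verify a secret and return a body, status, and optional mime type like this; the class name and option key are made up for illustration:

    module Agents
      class ExampleWebRequestAgent < Agent
        cannot_be_scheduled!

        def default_options
          { 'secret' => 'some-secret' }
        end

        def receive_web_request(params, method, format)
          # Reject requests that do not carry the configured secret.
          return ["Not Authorized", 401] unless params['secret'] == options['secret']

          create_event :payload => params.except('secret') if method == 'post'
          [{ 'status' => 'received' }, 200]
        end
      end
    end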
@@ -1,39 +0,0 @@
-# This controller is designed to allow your Agents to receive cross-site Webhooks (posts). When POSTed, your Agent will
-# have #receive_webhook called on itself with the POST params.
-#
-# Make POSTs to the following URL:
-#   http://yourserver.com/users/:user_id/webhooks/:agent_id/:secret
-# where :user_id is your User's id, :agent_id is an Agent's id, and :secret is a token that should be
-# user-specifiable in your Agent. It is highly recommended that you verify this token whenever #receive_webhook
-# is called. For example, one of your Agent's options could be :secret and you could compare this value
-# to params[:secret] whenever #receive_webhook is called on your Agent, rejecting invalid requests.
-#
-# Your Agent's #receive_webhook method should return an Array of [json_or_string_response, status_code]. For example:
-#   [{status: "success"}, 200]
-# or
-#   ["not found", 404]
-
-class WebhooksController < ApplicationController
-  skip_before_filter :authenticate_user!
-
-  def create
-    user = User.find_by_id(params[:user_id])
-    if user
-      agent = user.agents.find_by_id(params[:agent_id])
-      if agent
-        response, status = agent.trigger_webhook(params.except(:action, :controller, :agent_id, :user_id))
-        if response.is_a?(String)
-          render :text => response, :status => status || 200
-        elsif response.is_a?(Hash)
-          render :json => response, :status => status || 200
-        else
-          head :ok
-        end
-      else
-        render :text => "agent not found", :status => :not_found
-      end
-    else
-      render :text => "user not found", :status => :not_found
-    end
-  end
-end
@@ -16,4 +16,19 @@ module ApplicationHelper
       link_to '<span class="label btn-danger">No</span>'.html_safe, agent_path(agent, :tab => (agent.recent_error_logs? ? 'logs' : 'details'))
     end
   end
+
+  def render_dot(dot_format_string)
+    if (command = ENV['USE_GRAPHVIZ_DOT']) &&
+       (svg = IO.popen([command, *%w[-Tsvg -q1 -o/dev/stdout /dev/stdin]], 'w+') { |dot|
+          dot.print dot_format_string
+          dot.close_write
+          dot.read
+        } rescue false)
+      svg.html_safe
+    else
+      tag('img', src: URI('https://chart.googleapis.com/chart').tap { |uri|
+        uri.query = URI.encode_www_form(cht: 'gv', chl: dot_format_string)
+      })
+    end
+  end
 end
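A rough usage sketch (not part of the diff): with `USE_GRAPHVIZ_DOT` set, `render_dot` pipes the DOT source through the local `dot` binary and returns inline SVG; otherwise it falls back to an `<img>` pointing at Google Chart Tools. The DOT string below is made up.

    # Hypothetical call from a view/helper context.
    dot = 'digraph agents { "WeatherAgent" -> "EventFormattingAgent" }'
    render_dot(dot)   # => inline SVG when USE_GRAPHVIZ_DOT=dot, else an <img> tag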
@@ -16,7 +16,7 @@ class Agent < ActiveRecord::Base
 
   load_types_in "Agents"
 
-  SCHEDULES = %w[every_2m every_5m every_10m every_30m every_1h every_2h every_5h every_12h every_1d every_2d every_7d
+  SCHEDULES = %w[every_1m every_2m every_5m every_10m every_30m every_1h every_2h every_5h every_12h every_1d every_2d every_7d
                  midnight 1am 2am 3am 4am 5am 6am 7am 8am 9am 10am 11am noon 1pm 2pm 3pm 4pm 5pm 6pm 7pm 8pm 9pm 10pm 11pm never]
 
   EVENT_RETENTION_SCHEDULES = [["Forever", 0], ["1 day", 1], *([2, 3, 4, 5, 7, 14, 21, 30, 45, 90, 180, 365].map {|n| ["#{n} days", n] })]
@@ -73,7 +73,7 @@ class Agent < ActiveRecord::Base
     # Implement me in your subclass of Agent.
   end
 
-  def receive_webhook(params)
+  def receive_web_request(params, method, format)
     # Implement me in your subclass of Agent.
     ["not implemented", 404]
   end
@@ -83,10 +83,6 @@ class Agent < ActiveRecord::Base
     raise "Implement me in your subclass"
   end
 
-  def validate_options
-    # Implement me in your subclass to test for valid options.
-  end
-
   def event_created_within?(days)
     last_event_at && last_event_at > days.to_i.days.ago
   end
@@ -136,10 +132,18 @@ class Agent < ActiveRecord::Base
     message.gsub(/<([^>]+)>/) { Utils.value_at(payload, $1) || "??" }
   end
 
-  def trigger_webhook(params)
-    receive_webhook(params).tap do
-      self.last_webhook_at = Time.now
-      save!
+  def trigger_web_request(params, method, format)
+    if respond_to?(:receive_webhook)
+      Rails.logger.warn "DEPRECATED: The .receive_webhook method is deprecated, please switch your Agent to use .receive_web_request."
+      receive_webhook(params).tap do
+        self.last_web_request_at = Time.now
+        save!
+      end
+    else
+      receive_web_request(params, method, format).tap do
+        self.last_web_request_at = Time.now
+        save!
+      end
     end
   end
 
@@ -185,17 +189,7 @@ class Agent < ActiveRecord::Base
     update_column :last_error_log_at, nil
   end
 
-  # Validations and Callbacks
-
-  def sources_are_owned
-    errors.add(:sources, "must be owned by you") unless sources.all? {|s| s.user == user }
-  end
-
-  def validate_schedule
-    unless cannot_be_scheduled?
-      errors.add(:schedule, "is not a valid schedule") unless SCHEDULES.include?(schedule.to_s)
-    end
-  end
+  # Callbacks
 
   def set_default_schedule
     self.schedule = default_schedule unless schedule.present? || cannot_be_scheduled?
@@ -214,6 +208,24 @@ class Agent < ActiveRecord::Base
   def possibly_update_event_expirations
     update_event_expirations! if keep_events_for_changed?
   end
+
+  #Validation Methods
+
+  private
+
+  def sources_are_owned
+    errors.add(:sources, "must be owned by you") unless sources.all? {|s| s.user == user }
+  end
+
+  def validate_schedule
+    unless cannot_be_scheduled?
+      errors.add(:schedule, "is not a valid schedule") unless SCHEDULES.include?(schedule.to_s)
+    end
+  end
+
+  def validate_options
+    # Implement me in your subclass to test for valid options.
+  end
 
   # Class Methods
 
@@ -0,0 +1,90 @@
+module Agents
+  class BasecampAgent < Agent
+    cannot_receive_events!
+
+    description <<-MD
+      The BasecampAgent checks a Basecamp project for new Events
+
+      It is required that you enter your Basecamp credentials (`username` and `password`).
+
+      You also need to provide your Basecamp `user_id` and the `project_id` of the project you want to monitor.
+      If you have your Basecamp project opened in your browser you can find the user_id and project_id as follows:
+
+      `https://basecamp.com/`
+      user_id
+      `/projects/`
+      project_id
+      `-explore-basecamp`
+    MD
+
+    event_description <<-MD
+      Events are the raw JSON provided by the Basecamp API. Should look something like:
+
+        {
+          "creator": {
+            "fullsize_avatar_url": "https://dge9rmgqjs8m1.cloudfront.net/global/dfsdfsdfdsf/original.gif?r=3",
+            "avatar_url": "http://dge9rmgqjs8m1.cloudfront.net/global/dfsdfsdfdsf/avatar.gif?r=3",
+            "name": "Dominik Sander",
+            "id": 123456
+          },
+          "attachments": [],
+          "raw_excerpt": "test test",
+          "excerpt": "test test",
+          "id": 6454342343,
+          "created_at": "2014-04-17T10:25:31.000+02:00",
+          "updated_at": "2014-04-17T10:25:31.000+02:00",
+          "summary": "commented on whaat",
+          "action": "commented on",
+          "target": "whaat",
+          "url": "https://basecamp.com/12456/api/v1/projects/76454545-explore-basecamp/messages/76454545-whaat.json",
+          "html_url": "https://basecamp.com/12456/projects/76454545-explore-basecamp/messages/76454545-whaat#comment_76454545"
+        }
+    MD
+
+    default_schedule "every_10m"
+
+    def default_options
+      {
+        'username' => '',
+        'password' => '',
+        'user_id' => '',
+        'project_id' => '',
+      }
+    end
+
+    def validate_options
+      errors.add(:base, "you need to specify your basecamp username") unless options['username'].present?
+      errors.add(:base, "you need to specify your basecamp password") unless options['password'].present?
+      errors.add(:base, "you need to specify your basecamp user id") unless options['user_id'].present?
+      errors.add(:base, "you need to specify the basecamp project id of which you want to receive events") unless options['project_id'].present?
+    end
+
+    def working?
+      (events_count.present? && events_count > 0)
+    end
+
+    def check
+      reponse = HTTParty.get request_url, request_options.merge(query_parameters)
+      memory[:last_run] = Time.now.utc.iso8601
+      if last_check_at != nil
+        JSON.parse(reponse.body).each do |event|
+          create_event :payload => event
+        end
+      end
+      save!
+    end
+
+    private
+    def request_url
+      "https://basecamp.com/#{URI.encode(options[:user_id].to_s)}/api/v1/projects/#{URI.encode(options[:project_id].to_s)}/events.json"
+    end
+
+    def request_options
+      {:basic_auth => {:username =>options[:username], :password=>options[:password]}, :headers => {"User-Agent" => "Huginn (https://github.com/cantino/huginn)"}}
+    end
+
+    def query_parameters
+      memory[:last_run].present? ? { :query => {:since => memory[:last_run]} } : {}
+    end
+  end
+end
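A brief illustrative sketch (not part of the diff) of the request that the BasecampAgent's `check` builds; the ids here are hypothetical:

    user_id, project_id = 12456, 76454545   # hypothetical values
    url = "https://basecamp.com/#{user_id}/api/v1/projects/#{project_id}/events.json"
    # The first poll issues GET url with HTTP basic auth; later polls append
    # ?since=<memory[:last_run]> so that only events newer than the previous run
    # are fetched and emitted as Huginn Events.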
@@ -0,0 +1,132 @@
+module Agents
+  class DataOutputAgent < Agent
+    cannot_be_scheduled!
+
+    description do
+      <<-MD
+        The Agent outputs received events as either RSS or JSON. Use it to output a public or private stream of Huginn data.
+
+        This Agent will output data at:
+
+        `https://#{ENV['DOMAIN']}/users/#{user.id}/web_requests/#{id || '<id>'}/:secret.xml`
+
+        where `:secret` is one of the allowed secrets specified in your options and the extension can be `xml` or `json`.
+
+        You can setup multiple secrets so that you can individually authorize external systems to
+        access your Huginn data.
+
+        Options:
+
+        * `secrets` - An array of tokens that the requestor must provide for light-weight authentication.
+        * `expected_receive_period_in_days` - How often you expect data to be received by this Agent from other Agents.
+        * `template` - A JSON object representing a mapping between item output keys and incoming event JSONPath values. JSONPath values must start with `$`, or can be interpolated between `<` and `>` characters. The `item` key will be repeated for every Event.
+      MD
+    end
+
+    def default_options
+      {
+        "secrets" => ["a-secret-key"],
+        "expected_receive_period_in_days" => 2,
+        "template" => {
+          "title" => "XKCD comics as a feed",
+          "description" => "This is a feed of recent XKCD comics, generated by Huginn",
+          "item" => {
+            "title" => "$.title",
+            "description" => "Secret hovertext: <$.hovertext>",
+            "link" => "$.url",
+          }
+        }
+      }
+    end
+
+    #"guid" => "",
+    # "pubDate" => ""
+
+    def working?
+      last_receive_at && last_receive_at > options['expected_receive_period_in_days'].to_i.days.ago && !recent_error_logs?
+    end
+
+    def validate_options
+      unless options['secrets'].is_a?(Array) && options['secrets'].length > 0
+        errors.add(:base, "Please specify one or more secrets for 'authenticating' incoming feed requests")
+      end
+      unless options['expected_receive_period_in_days'].present? && options['expected_receive_period_in_days'].to_i > 0
+        errors.add(:base, "Please provide 'expected_receive_period_in_days' to indicate how many days can pass before this Agent is considered to be not working")
+      end
+
+      unless options['template'].present? && options['template']['item'].present? && options['template']['item'].is_a?(Hash)
+        errors.add(:base, "Please provide template and template.item")
+      end
+    end
+
+    def events_to_show
+      (options['events_to_show'].presence || 40).to_i
+    end
+
+    def feed_ttl
+      (options['ttl'].presence || 60).to_i
+    end
+
+    def feed_title
+      options['template']['title'].presence || "#{name} Event Feed"
+    end
+
+    def feed_link
+      options['template']['link'].presence || "https://#{ENV['DOMAIN']}"
+    end
+
+    def feed_description
+      options['template']['description'].presence || "A feed of Events received by the '#{name}' Huginn Agent"
+    end
+
+    def receive_web_request(params, method, format)
+      if options['secrets'].include?(params['secret'])
+        items = received_events.order('id desc').limit(events_to_show).map do |event|
+          interpolated = Utils.recursively_interpolate_jsonpaths(options['template']['item'], event.payload, :leading_dollarsign_is_jsonpath => true)
+          interpolated['guid'] = event.id
+          interpolated['pubDate'] = event.created_at.rfc2822.to_s
+          interpolated
+        end
+
+        if format =~ /json/
+          content = {
+            'title' => feed_title,
+            'description' => feed_description,
+            'pubDate' => Time.now,
+            'items' => items
+          }
+
+          return [content, 200]
+        else
+          content = Utils.unindent(<<-XML)
+            <?xml version="1.0" encoding="UTF-8" ?>
+            <rss version="2.0">
+            <channel>
+            <title>#{feed_title.encode(:xml => :text)}</title>
+            <description>#{feed_description.encode(:xml => :text)}</description>
+            <link>#{feed_link.encode(:xml => :text)}</link>
+            <lastBuildDate>#{Time.now.rfc2822.to_s.encode(:xml => :text)}</lastBuildDate>
+            <pubDate>#{Time.now.rfc2822.to_s.encode(:xml => :text)}</pubDate>
+            <ttl>#{feed_ttl}</ttl>
+
+          XML
+
+          content += items.to_xml(:skip_types => true, :root => "items", :skip_instruct => true, :indent => 1).gsub(/^<\/?items>/, '').strip
+
+          content += Utils.unindent(<<-XML)
+            </channel>
+            </rss>
+          XML
+
+          return [content, 200, 'text/xml']
+        end
+      else
+        if format =~ /json/
+          return [{ :error => "Not Authorized" }, 401]
+        else
+          return ["Not Authorized", 401]
+        end
+      end
+    end
+  end
+end
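A rough usage sketch (not part of the diff): with the default options above, an external reader could request the feed as shown; the domain, user id, and agent id are placeholders.

    # Hypothetical feed URLs for agent id 42 owned by user 1, secret "a-secret-key":
    #   https://example.com/users/1/web_requests/42/a-secret-key.xml   -> RSS 2.0
    #   https://example.com/users/1/web_requests/42/a-secret-key.json  -> JSON
    item_template = { 'title' => '$.title', 'link' => '$.url' }
    payload       = { 'title' => 'Some comic', 'url' => 'http://xkcd.com/1' }
    # Each received Event is run through template.item, yielding one feed item such as
    # { 'title' => 'Some comic', 'link' => 'http://xkcd.com/1' }, plus guid and pubDate.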
@@ -12,6 +12,10 @@ module Agents
           "celsius": "18",
           "fahreinheit": "64"
         },
+        "date": {
+          "epoch": "1357959600",
+          "pretty": "10:00 PM EST on January 11, 2013"
+        },
         "conditions": "Rain showers",
         "data": "This is some data"
       }
@@ -33,6 +37,33 @@ module Agents
         "subject": "This is some data"
       }
 
+      In `matchers` setting you can perform regular expression matching against contents of events and expand the match data for use in `instructions` setting. Here is an example:
+
+          {
+            "matchers": [
+              {
+                "path": "$.date.pretty",
+                "regexp": "\\A(?<time>\\d\\d:\\d\\d [AP]M [A-Z]+)",
+                "to": "pretty_date",
+              }
+            ]
+          }
+
+      This virtually merges the following hash into the original event hash:
+
+          "pretty_date": {
+            "time": "10:00 PM EST",
+            "0": "10:00 PM EST on January 11, 2013"
+            "1": "10:00 PM EST",
+          }
+
+      So you can use it in `instructions` like this:
+
+          "instructions": {
+            "message": "Today's conditions look like <$.conditions> with a high temperature of <$.high.celsius> degrees Celsius according to the forecast at <$.pretty_date.time>.",
+            "subject": "$.data"
+          }
+
       If you want to retain original contents of events and only add new keys, then set `mode` to `merge`, otherwise set it to `clean`.
 
       By default, the output event will have `agent` and `created_at` fields added as well, reflecting the original Agent type and Event creation time. You can skip these outputs by setting `skip_agent` and `skip_created_at` to `true`.
@@ -46,8 +77,12 @@ module Agents
 
     event_description "User defined"
 
+    after_save :clear_matchers
+
     def validate_options
       errors.add(:base, "instructions, mode, skip_agent, and skip_created_at all need to be present.") unless options['instructions'].present? and options['mode'].present? and options['skip_agent'].present? and options['skip_created_at'].present?
+
+      validate_matchers
     end
 
     def default_options
@@ -56,6 +91,7 @@ module Agents
           'message' => "You received a text <$.text> from <$.fields.from>",
           'some_other_field' => "Looks like the weather is going to be <$.fields.weather>"
         },
+        'matchers' => [],
         'mode' => "clean",
        'skip_agent' => "false",
        'skip_created_at' => "false"
@@ -68,12 +104,92 @@ module Agents
 
     def receive(incoming_events)
       incoming_events.each do |event|
-        formatted_event = options['mode'].to_s == "merge" ? event.payload : {}
-        options['instructions'].each_pair {|key, value| formatted_event[key] = Utils.interpolate_jsonpaths(value, event.payload) }
+        formatted_event = options['mode'].to_s == "merge" ? event.payload.dup : {}
+        payload = perform_matching(event.payload)
+        options['instructions'].each_pair {|key, value| formatted_event[key] = Utils.interpolate_jsonpaths(value, payload) }
         formatted_event['agent'] = Agent.find(event.agent_id).type.slice!(8..-1) unless options['skip_agent'].to_s == "true"
         formatted_event['created_at'] = event.created_at unless options['skip_created_at'].to_s == "true"
         create_event :payload => formatted_event
       end
     end
+
+    private
+
+    def validate_matchers
+      matchers = options['matchers'] or return
+
+      unless matchers.is_a?(Array)
+        errors.add(:base, "matchers must be an array if present")
+        return
+      end
+
+      matchers.each do |matcher|
+        unless matcher.is_a?(Hash)
+          errors.add(:base, "each matcher must be a hash")
+          next
+        end
+
+        regexp, path, to = matcher.values_at(*%w[regexp path to])
+
+        if regexp.present?
+          begin
+            Regexp.new(regexp)
+          rescue
+            errors.add(:base, "bad regexp found in matchers: #{regexp}")
+          end
+        else
+          errors.add(:base, "regexp is mandatory for a matcher and must be a string")
+        end
+
+        errors.add(:base, "path is mandatory for a matcher and must be a string") if !path.present?
+
+        errors.add(:base, "to must be a string if present in a matcher") if to.present? && !to.is_a?(String)
+      end
+    end
+
+    def perform_matching(payload)
+      matchers.inject(payload.dup) { |hash, matcher|
+        matcher[hash]
+      }
+    end
+
+    def matchers
+      @matchers ||=
+        if matchers = options['matchers']
+          matchers.map { |matcher|
+            regexp, path, to = matcher.values_at(*%w[regexp path to])
+            re = Regexp.new(regexp)
+            proc { |hash|
+              mhash = {}
+              value = Utils.value_at(hash, path)
+              if value.is_a?(String) && (m = re.match(value))
+                m.to_a.each_with_index { |s, i|
+                  mhash[i.to_s] = s
+                }
+                m.names.each do |name|
+                  mhash[name] = m[name]
+                end if m.respond_to?(:names)
+              end
+              if to
+                case value = hash[to]
+                when Hash
+                  value.update(mhash)
+                else
+                  hash[to] = mhash
+                end
+              else
+                hash.update(mhash)
+              end
+              hash
+            }
+          }
+        else
+          []
+        end
+    end
+
+    def clear_matchers
+      @matchers = nil
+    end
   end
-end
+end
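A brief illustration (not part of the diff) of what one matcher from the description produces for the sample payload:

    # The matcher's regexp, applied to payload['date']['pretty']:
    m = /\A(?<time>\d\d:\d\d [AP]M [A-Z]+)/.match("10:00 PM EST on January 11, 2013")
    m[:time]   # => "10:00 PM EST"
    # perform_matching copies the numbered and named captures into a hash and,
    # because "to" is "pretty_date", merges it as payload["pretty_date"], which is
    # what lets the instructions reference <$.pretty_date.time>.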
@@ -0,0 +1,214 @@
+require 'net/ftp'
+require 'uri'
+require 'time'
+
+module Agents
+  class FtpsiteAgent < Agent
+    cannot_receive_events!
+
+    default_schedule "every_12h"
+
+    description <<-MD
+      The FtpsiteAgent checks a FTP site and creates Events based on newly uploaded files in a directory.
+
+      Specify a `url` that represents a directory of an FTP site to watch, and a list of `patterns` to match against file names.
+
+      Login credentials can be included in `url` if authentication is required.
+
+      Only files with a last modification time later than the `after` value, if specifed, are notified.
+    MD
+
+    event_description <<-MD
+      Events look like this:
+
+          {
+            "url": "ftp://example.org/pub/releases/foo-1.2.tar.gz",
+            "filename": "foo-1.2.tar.gz",
+            "timestamp": "2014-04-10T22:50:00Z"
+          }
+    MD
+
+    def working?
+      event_created_within?(options['expected_update_period_in_days']) && !recent_error_logs?
+    end
+
+    def default_options
+      {
+        'expected_update_period_in_days' => "1",
+        'url' => "ftp://example.org/pub/releases/",
+        'patterns' => [
+          'foo-*.tar.gz',
+        ],
+        'after' => Time.now.iso8601,
+      }
+    end
+
+    def validate_options
+      # Check for required fields
+      begin
+        url = options['url']
+        String === url or raise
+        uri = URI(url)
+        URI::FTP === uri or raise
+        errors.add(:base, "url must end with a slash") unless uri.path.end_with?('/')
+      rescue
+        errors.add(:base, "url must be a valid FTP URL")
+      end
+
+      patterns = options['patterns']
+      case patterns
+      when Array
+        if patterns.empty?
+          errors.add(:base, "patterns must not be empty")
+        end
+      when nil, ''
+        errors.add(:base, "patterns must be specified")
+      else
+        errors.add(:base, "patterns must be an array")
+      end
+
+      # Check for optional fields
+      if (timestamp = options['timestamp']).present?
+        begin
+          Time.parse(timestamp)
+        rescue
+          errors.add(:base, "timestamp cannot be parsed as time")
+        end
+      end
+
+      if options['expected_update_period_in_days'].present?
+        errors.add(:base, "Invalid expected_update_period_in_days format") unless is_positive_integer?(options['expected_update_period_in_days'])
+      end
+    end
+
+    def check
+      saving_entries do |found|
+        each_entry { |filename, mtime|
+          found[filename, mtime]
+        }
+      end
+    end
+
+    def each_entry
+      patterns = options['patterns']
+
+      after =
+        if str = options['after']
+          Time.parse(str)
+        else
+          Time.at(0)
+        end
+
+      open_ftp(base_uri) do |ftp|
+        log "Listing the directory"
+        # Do not use a block style call because we need to call other
+        # commands during iteration.
+        list = ftp.list('-a')
+
+        month2year = {}
+
+        list.each do |line|
+          mon, day, smtn, rest = line.split(' ', 9)[5..-1]
+
+          # Remove symlink target part if any
+          filename = rest[/\A(.+?)(?:\s+->\s|\z)/, 1]
+
+          patterns.any? { |pattern|
+            File.fnmatch?(pattern, filename)
+          } or next
+
+          case smtn
+          when /:/
+            if year = month2year[mon]
+              mtime = Time.parse("#{mon} #{day} #{year} #{smtn} GMT")
+            else
+              log "Getting mtime of #{filename}"
+              mtime = ftp.mtime(filename)
+              month2year[mon] = mtime.year
+            end
+          else
+            # Do not bother calling MDTM for old files. Losing the
+            # time part only makes a timestamp go backwards, meaning
+            # that it will trigger no new event.
+            mtime = Time.parse("#{mon} #{day} #{smtn} GMT")
+          end
+
+          after < mtime or next
+
+          yield filename, mtime
+        end
+      end
+    end
+
+    def open_ftp(uri)
+      ftp = Net::FTP.new
+
+      log "Connecting to #{uri.host}#{':%d' % uri.port if uri.port != uri.default_port}"
+      ftp.connect(uri.host, uri.port)
+
+      user =
+        if str = uri.user
+          URI.decode(str)
+        else
+          'anonymous'
+        end
+      password =
+        if str = uri.password
+          URI.decode(str)
+        else
+          'anonymous@'
+        end
+      log "Logging in as #{user}"
+      ftp.login(user, password)
+
+      ftp.passive = true
+
+      path = uri.path.chomp('/')
+      log "Changing directory to #{path}"
+      ftp.chdir(path)
+
+      yield ftp
+    ensure
+      log "Closing the connection"
+      ftp.close
+    end
+
+    def base_uri
+      @base_uri ||= URI(options['url'])
+    end
+
+    def saving_entries
+      known_entries = memory['known_entries'] || {}
+      found_entries = {}
+      new_files = []
+
+      yield proc { |filename, mtime|
+        found_entries[filename] = misotime = mtime.utc.iso8601
+        unless (prev = known_entries[filename]) && misotime <= prev
+          new_files << filename
+        end
+      }
+
+      new_files.sort_by { |filename|
+        found_entries[filename]
+      }.each { |filename|
+        create_event :payload => {
+          'url' => (base_uri + filename).to_s,
+          'filename' => filename,
+          'timestamp' => found_entries[filename],
+        }
+      }
+
+      memory['known_entries'] = found_entries
+      save!
+    end
+
+    private
+
+    def is_positive_integer?(value)
+      Integer(value) >= 0
+    rescue
+      false
+    end
+  end
+end
@@ -0,0 +1,76 @@ |
||
1 |
+module Agents |
|
2 |
+ class HipchatAgent < Agent |
|
3 |
+ cannot_be_scheduled! |
|
4 |
+ cannot_create_events! |
|
5 |
+ |
|
6 |
+ description <<-MD |
|
7 |
+ The HipchatAgent sends messages to a Hipchat Room |
|
8 |
+ |
|
9 |
+ To authenticate you need to set the `auth_token`, you can get one at your Hipchat Group Admin page which you can find here: |
|
10 |
+ |
|
11 |
+ `https://yoursubdomain.hipchat.com/admin/api` (substituting your own subdomain) |
|
12 |
+ |
|
13 |
+ Change the `room_name` to the name of the room you want to send notifications to. |
|
14 |
+ |
|
15 |
+ You can provide a `username` and a `message`. When sending an HTML-formatted message, change `format` to "html". |
|
16 |
+ If you want your message to notify the room members, change `notify` to "true". |
|
17 |
+ Modify the background color of your message via the `color` attribute (one of "yellow", "red", "green", "purple", "gray", or "random"). |
|
18 |
+ |
|
19 |
+ If you want to specify any of those attributes per event, you can provide a [JSONPath](http://goessner.net/articles/JsonPath/) for each of them (except the `auth_token`). |
|
20 |
+ MD |
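As a hedged illustration of the options described above (token, room, and JSONPath values are placeholders), an agent that sends a fixed message but picks the room per event might be configured roughly like this:

    {
      'auth_token'     => 'YOUR_HIPCHAT_TOKEN',
      'room_name'      => '',            # blank because room_name_path is used instead
      'room_name_path' => '$.room',      # read the room name from each event's payload
      'username'       => 'Huginn',
      'message'        => 'Hello from Huginn!',
      'notify'         => true,
      'color'          => 'purple'
    }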
|
21 |
+ |
|
22 |
+ def default_options |
|
23 |
+ { |
|
24 |
+ 'auth_token' => '', |
|
25 |
+ 'room_name' => '', |
|
26 |
+ 'room_name_path' => '', |
|
27 |
+ 'username' => "Huginn", |
|
28 |
+ 'username_path' => '', |
|
29 |
+ 'message' => "Hello from Huginn!", |
|
30 |
+ 'message_path' => '', |
|
31 |
+ 'notify' => false, |
|
32 |
+ 'notify_path' => '', |
|
33 |
+ 'color' => 'yellow', |
|
34 |
+ 'color_path' => '', |
|
35 |
+ } |
|
36 |
+ end |
|
37 |
+ |
|
38 |
+ def validate_options |
|
39 |
+ errors.add(:base, "you need to specify a hipchat auth_token") unless options['auth_token'].present? |
|
40 |
+ errors.add(:base, "you need to specify a room_name or a room_name_path") if options['room_name'].blank? && options['room_name_path'].blank? |
|
41 |
+ end |
|
42 |
+ |
|
43 |
+ def working? |
|
44 |
+ (last_receive_at.present? && last_error_log_at.nil?) || (last_receive_at.present? && last_error_log_at.present? && last_receive_at > last_error_log_at) |
|
45 |
+ end |
|
46 |
+ |
|
47 |
+ def receive(incoming_events) |
|
48 |
+ client = HipChat::Client.new(options[:auth_token]) |
|
49 |
+ incoming_events.each do |event| |
|
50 |
+ mo = merge_options event |
|
51 |
+ client[mo[:room_name]].send(mo[:username], mo[:message], :notify => mo[:notify].to_s == 'true' ? 1 : 0, :color => mo[:color]) |
|
52 |
+ end |
|
53 |
+ end |
|
54 |
+ |
|
55 |
+ private |
|
56 |
+ def select_option(event, a) |
|
57 |
+ if options[a.to_s + '_path'].present? |
|
58 |
+ Utils.value_at(event.payload, options[a.to_s + '_path']) |
|
59 |
+ else |
|
60 |
+ options[a] |
|
61 |
+ end |
|
62 |
+ end |
|
63 |
+ |
|
64 |
+ def options_with_path |
|
65 |
+ [:room_name, :username, :message, :notify, :color] |
|
66 |
+ end |
|
67 |
+ |
|
68 |
+ def merge_options event |
|
69 |
+ options.select { |k, v| options_with_path.include? k}.tap do |merged_options| |
|
70 |
+ options_with_path.each do |a| |
|
71 |
+ merged_options[a] = select_option(event, a) |
|
72 |
+ end |
|
73 |
+ end |
|
74 |
+ end |
|
75 |
+ end |
|
76 |
+end |
@@ -1,10 +1,15 @@ |
||
1 | 1 |
module Agents |
2 | 2 |
class PostAgent < Agent |
3 |
- cannot_be_scheduled! |
|
4 | 3 |
cannot_create_events! |
5 | 4 |
|
5 |
+ default_schedule "never" |
|
6 |
+ |
|
6 | 7 |
description <<-MD |
7 |
- Post Agent receives events from other agents and send those events as the contents of a post request to a specified url. `post_url` field must specify where you would like to receive post requests and do not forget to include URI scheme (`http` or `https`) |
|
8 |
+ A PostAgent receives events from other agents (or runs periodically), merges those events with the contents of `payload`, and sends the results as POST (or GET) requests to a specified url. |
|
9 |
+ |
|
10 |
+ The `post_url` field must specify where you would like to send requests. Please include the URI scheme (`http` or `https`). |
|
11 |
+ |
|
12 |
+ The `headers` field is optional. When present, it should be a hash of headers to send with the request. |
|
8 | 13 |
MD |
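To make the merging behavior described above concrete: the configured `payload` acts as a set of defaults, and the incoming event's payload wins on conflicting keys. A tiny sketch with invented values:

    defaults      = { 'source' => 'huginn', 'key' => 'value' }
    event_payload = { 'key' => 'from the event', 'extra' => 42 }

    defaults.merge(event_payload)
    # => {"source"=>"huginn", "key"=>"from the event", "extra"=>42}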
9 | 14 |
|
10 | 15 |
event_description "Does not produce events." |
@@ -12,7 +17,12 @@ module Agents |
||
12 | 17 |
def default_options |
13 | 18 |
{ |
14 | 19 |
'post_url' => "http://www.example.com", |
15 |
- 'expected_receive_period_in_days' => 1 |
|
20 |
+ 'expected_receive_period_in_days' => 1, |
|
21 |
+ 'method' => 'post', |
|
22 |
+ 'payload' => { |
|
23 |
+ 'key' => 'value' |
|
24 |
+ }, |
|
25 |
+ 'headers' => {} |
|
16 | 26 |
} |
17 | 27 |
end |
18 | 28 |
|
@@ -20,23 +30,71 @@ module Agents |
||
20 | 30 |
last_receive_at && last_receive_at > options['expected_receive_period_in_days'].to_i.days.ago && !recent_error_logs? |
21 | 31 |
end |
22 | 32 |
|
33 |
+ def method |
|
34 |
+ (options['method'].presence || 'post').to_s.downcase |
|
35 |
+ end |
|
36 |
+ |
|
37 |
+ def headers |
|
38 |
+ options['headers'].presence || {} |
|
39 |
+ end |
|
40 |
+ |
|
23 | 41 |
def validate_options |
24 | 42 |
unless options['post_url'].present? && options['expected_receive_period_in_days'].present? |
25 | 43 |
errors.add(:base, "post_url and expected_receive_period_in_days are required fields") |
26 | 44 |
end |
27 |
- end |
|
28 | 45 |
|
29 |
- def post_event(uri, event) |
|
30 |
- req = Net::HTTP::Post.new(uri.request_uri) |
|
31 |
- req.form_data = event |
|
32 |
- Net::HTTP.start(uri.hostname, uri.port, :use_ssl => uri.scheme == "https") { |http| http.request(req) } |
|
46 |
+ if options['payload'].present? && !options['payload'].is_a?(Hash) |
|
47 |
+ errors.add(:base, "if provided, payload must be a hash") |
|
48 |
+ end |
|
49 |
+ |
|
50 |
+ unless %w[post get].include?(method) |
|
51 |
+ errors.add(:base, "method must be 'post' or 'get'") |
|
52 |
+ end |
|
53 |
+ |
|
54 |
+ unless headers.is_a?(Hash) |
|
55 |
+ errors.add(:base, "if provided, headers must be a hash") |
|
56 |
+ end |
|
33 | 57 |
end |
34 | 58 |
|
35 | 59 |
def receive(incoming_events) |
36 | 60 |
incoming_events.each do |event| |
37 |
- uri = URI options[:post_url] |
|
38 |
- post_event uri, event.payload |
|
61 |
+ handle (options['payload'].presence || {}).merge(event.payload) |
|
39 | 62 |
end |
40 | 63 |
end |
64 |
+ |
|
65 |
+ def check |
|
66 |
+ handle options['payload'].presence || {} |
|
67 |
+ end |
|
68 |
+ |
|
69 |
+ def generate_uri(params = nil) |
|
70 |
+ uri = URI options[:post_url] |
|
71 |
+ uri.query = URI.encode_www_form(Hash[URI.decode_www_form(uri.query || '')].merge(params)) if params |
|
72 |
+ uri |
|
73 |
+ end |
|
74 |
+ |
|
75 |
+ private |
|
76 |
+ |
|
77 |
+ def handle(data) |
|
78 |
+ if method == 'post' |
|
79 |
+ post_data(data) |
|
80 |
+ elsif method == 'get' |
|
81 |
+ get_data(data) |
|
82 |
+ else |
|
83 |
+ error "Invalid method '#{method}'" |
|
84 |
+ end |
|
85 |
+ end |
|
86 |
+ |
|
87 |
+ def post_data(data) |
|
88 |
+ uri = generate_uri |
|
89 |
+ req = Net::HTTP::Post.new(uri.request_uri, headers) |
|
90 |
+ req.form_data = data |
|
91 |
+ Net::HTTP.start(uri.hostname, uri.port, :use_ssl => uri.scheme == "https") { |http| http.request(req) } |
|
92 |
+ end |
|
93 |
+ |
|
94 |
+ def get_data(data) |
|
95 |
+ uri = generate_uri(data) |
|
96 |
+ req = Net::HTTP::Get.new(uri.request_uri, headers) |
|
97 |
+ Net::HTTP.start(uri.hostname, uri.port, :use_ssl => uri.scheme == "https") { |http| http.request(req) } |
|
98 |
+ end |
|
41 | 99 |
end |
42 | 100 |
end |
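For GET requests, the `generate_uri` helper above folds the outgoing data into any query string already present on `post_url`. A standalone sketch of that stdlib round-trip (URL and params are illustrative):

    require 'uri'

    uri    = URI('http://www.example.com/endpoint?existing=1')
    params = { 'key' => 'value' }

    # Decode the existing query, merge in the new params, and re-encode
    uri.query = URI.encode_www_form(Hash[URI.decode_www_form(uri.query || '')].merge(params))
    puts uri  # => http://www.example.com/endpoint?existing=1&key=value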
@@ -0,0 +1,111 @@ |
||
1 |
+require 'open3' |
|
2 |
+ |
|
3 |
+module Agents |
|
4 |
+ class ShellCommandAgent < Agent |
|
5 |
+ default_schedule "never" |
|
6 |
+ |
|
7 |
+ def self.should_run? |
|
8 |
+ ENV['ENABLE_INSECURE_AGENTS'] == "true" |
|
9 |
+ end |
|
10 |
+ |
|
11 |
+ description <<-MD |
|
12 |
+ The ShellCommandAgent can execute commands on your local system, returning the output. |
|
13 |
+ |
|
14 |
+ `command` specifies the command to be executed, and `path` will tell ShellCommandAgent in what directory to run this command. |
|
15 |
+ |
|
16 |
+ `expected_update_period_in_days` is used to determine if the Agent is working. |
|
17 |
+ |
|
18 |
+ ShellCommandAgent can also act upon received events. These events may contain their own `path` and `command` values. If they do not, ShellCommandAgent will use the configured options. For this reason, please specify defaults even if you are planning to have this Agent respond to events. |
|
19 |
+ |
|
20 |
+ The resulting event will contain the `command` which was executed, the `path` it was executed under, the `exit_status` of the command, the `errors`, and the actual `output`. ShellCommandAgent will not log an error if the result implies that something went wrong. |
|
21 |
+ |
|
22 |
+ *Warning*: This type of Agent runs arbitrary commands on your system, #{Agents::ShellCommandAgent.should_run? ? "but is **currently enabled**" : "and is **currently disabled**"}. |
|
23 |
+ Only enable this Agent if you trust everyone using your Huginn installation. |
|
24 |
+ You can enable this Agent in your .env file by setting `ENABLE_INSECURE_AGENTS` to `true`. |
|
25 |
+ MD |
|
26 |
+ |
|
27 |
+ event_description <<-MD |
|
28 |
+ Events look like this: |
|
29 |
+ |
|
30 |
+ { |
|
31 |
+ 'command' => 'pwd', |
|
32 |
+ 'path' => '/home/Huginn', |
|
33 |
+ 'exit_status' => '0', |
|
34 |
+ 'errors' => '', |
|
35 |
+ 'output' => '/home/Huginn' |
|
36 |
+ } |
|
37 |
+ MD |
|
38 |
+ |
|
39 |
+ def default_options |
|
40 |
+ { |
|
41 |
+ 'path' => "/", |
|
42 |
+ 'command' => "pwd", |
|
43 |
+ 'expected_update_period_in_days' => 1 |
|
44 |
+ } |
|
45 |
+ end |
|
46 |
+ |
|
47 |
+ def validate_options |
|
48 |
+ unless options['path'].present? && options['command'].present? && options['expected_update_period_in_days'].present? |
|
49 |
+ errors.add(:base, "The path, command, and expected_update_period_in_days fields are all required.") |
|
50 |
+ end |
|
51 |
+ |
|
52 |
+ unless File.directory?(options['path']) |
|
53 |
+ errors.add(:base, "#{options['path']} is not a real directory.") |
|
54 |
+ end |
|
55 |
+ end |
|
56 |
+ |
|
57 |
+ def working? |
|
58 |
+ Agents::ShellCommandAgent.should_run? && event_created_within?(options['expected_update_period_in_days']) && !recent_error_logs? |
|
59 |
+ end |
|
60 |
+ |
|
61 |
+ def receive(incoming_events) |
|
62 |
+ incoming_events.each do |event| |
|
63 |
+ handle(event.payload, event) |
|
64 |
+ end |
|
65 |
+ end |
|
66 |
+ |
|
67 |
+ def check |
|
68 |
+ handle(options) |
|
69 |
+ end |
|
70 |
+ |
|
71 |
+ private |
|
72 |
+ |
|
73 |
+ def handle(opts = options, event = nil) |
|
74 |
+ if Agents::ShellCommandAgent.should_run? |
|
75 |
+ command = opts['command'] || options['command'] |
|
76 |
+ path = opts['path'] || options['path'] |
|
77 |
+ |
|
78 |
+ result, errors, exit_status = run_command(path, command) |
|
79 |
+ |
|
80 |
+ vals = {"command" => command, "path" => path, "exit_status" => exit_status, "errors" => errors, "output" => result} |
|
81 |
+ created_event = create_event :payload => vals |
|
82 |
+ |
|
83 |
+ log("Ran '#{command}' under '#{path}'", :outbound_event => created_event, :inbound_event => event) |
|
84 |
+ else |
|
85 |
+ log("Unable to run because insecure agents are not enabled. Edit ENABLE_INSECURE_AGENTS in the Huginn .env configuration.") |
|
86 |
+ end |
|
87 |
+ end |
|
88 |
+ |
|
89 |
+ def run_command(path, command) |
|
90 |
+ result = nil |
|
91 |
+ errors = nil |
|
92 |
+ exit_status = nil |
|
93 |
+ |
|
94 |
+ Dir.chdir(path){ |
|
95 |
+ begin |
|
96 |
+ stdin, stdout, stderr, wait_thr = Open3.popen3(command) |
|
97 |
+ exit_status = wait_thr.value.to_i |
|
98 |
+ result = stdout.gets(nil) |
|
99 |
+ errors = stderr.gets(nil) |
|
100 |
+ rescue Exception => e |
|
101 |
+ errors = e.to_s |
|
102 |
+ end |
|
103 |
+ } |
|
104 |
+ |
|
105 |
+ result = result.to_s.strip |
|
106 |
+ errors = errors.to_s.strip |
|
107 |
+ |
|
108 |
+ [result, errors, exit_status] |
|
109 |
+ end |
|
110 |
+ end |
|
111 |
+end |
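The `run_command` helper above follows the usual `Open3.popen3` capture pattern. A stripped-down sketch of that pattern on its own (the command is an arbitrary example, and this sketch reads `exitstatus` rather than the agent's raw status integer):

    require 'open3'

    stdin, stdout, stderr, wait_thr = Open3.popen3('ls /')
    output = stdout.gets(nil).to_s.strip   # read all of stdout at once
    errors = stderr.gets(nil).to_s.strip
    status = wait_thr.value                # Process::Status, available once the command exits

    puts "exit status: #{status.exitstatus}"
    puts output
    warn errors unless errors.empty?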
@@ -11,6 +11,8 @@ module Agents |
||
11 | 11 |
|
12 | 12 |
The `type` can be one of #{VALID_COMPARISON_TYPES.map { |t| "`#{t}`" }.to_sentence} and compares with the `value`. |
13 | 13 |
|
14 |
+ The `value` can be a single value or an array of values. In the case of an array, if one or more values match, the rule matches. |
|
15 |
+ |
|
14 | 16 |
All rules must match for the Agent to match. The resulting Event will have a payload message of `message`. You can include extractions in the message, for example: `I saw a bar of: <foo.bar>` |
15 | 17 |
|
16 | 18 |
Set `expected_receive_period_in_days` to the maximum amount of time that you'd expect to pass between Events being received by this Agent. |
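To make the new array form concrete, a hedged example rule (path and values are invented) that matches when the payload's `status` is either string:

    {
      'type'  => 'field==value',
      'path'  => 'status',
      'value' => ['error', 'failure']   # the rule matches if any element matches
    }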
@@ -49,25 +51,30 @@ module Agents |
||
49 | 51 |
incoming_events.each do |event| |
50 | 52 |
match = options['rules'].all? do |rule| |
51 | 53 |
value_at_path = Utils.value_at(event['payload'], rule['path']) |
52 |
- case rule['type'] |
|
54 |
+ rule_values = rule['value'] |
|
55 |
+ rule_values = [rule_values] unless rule_values.is_a?(Array) |
|
56 |
+ |
|
57 |
+ match_found = rule_values.any? do |rule_value| |
|
58 |
+ case rule['type'] |
|
53 | 59 |
when "regex" |
54 |
- value_at_path.to_s =~ Regexp.new(rule['value'], Regexp::IGNORECASE) |
|
60 |
+ value_at_path.to_s =~ Regexp.new(rule_value, Regexp::IGNORECASE) |
|
55 | 61 |
when "!regex" |
56 |
- value_at_path.to_s !~ Regexp.new(rule['value'], Regexp::IGNORECASE) |
|
62 |
+ value_at_path.to_s !~ Regexp.new(rule_value, Regexp::IGNORECASE) |
|
57 | 63 |
when "field>value" |
58 |
- value_at_path.to_f > rule['value'].to_f |
|
64 |
+ value_at_path.to_f > rule_value.to_f |
|
59 | 65 |
when "field>=value" |
60 |
- value_at_path.to_f >= rule['value'].to_f |
|
66 |
+ value_at_path.to_f >= rule_value.to_f |
|
61 | 67 |
when "field<value" |
62 |
- value_at_path.to_f < rule['value'].to_f |
|
68 |
+ value_at_path.to_f < rule_value.to_f |
|
63 | 69 |
when "field<=value" |
64 |
- value_at_path.to_f <= rule['value'].to_f |
|
70 |
+ value_at_path.to_f <= rule_value.to_f |
|
65 | 71 |
when "field==value" |
66 |
- value_at_path.to_s == rule['value'].to_s |
|
72 |
+ value_at_path.to_s == rule_value.to_s |
|
67 | 73 |
when "field!=value" |
68 |
- value_at_path.to_s != rule['value'].to_s |
|
74 |
+ value_at_path.to_s != rule_value.to_s |
|
69 | 75 |
else |
70 | 76 |
raise "Invalid type of #{rule['type']} in TriggerAgent##{id}" |
77 |
+ end |
|
71 | 78 |
end |
72 | 79 |
end |
73 | 80 |
|
@@ -75,10 +75,10 @@ module Agents |
||
75 | 75 |
end |
76 | 76 |
|
77 | 77 |
def post_url(server_url,secret) |
78 |
- "#{server_url}/users/#{self.user.id}/webhooks/#{self.id}/#{secret}" |
|
78 |
+ "#{server_url}/users/#{self.user.id}/web_requests/#{self.id}/#{secret}" |
|
79 | 79 |
end |
80 | 80 |
|
81 |
- def receive_webhook(params) |
|
81 |
+ def receive_web_request(params, method, format) |
|
82 | 82 |
if memory['pending_calls'].has_key? params['secret'] |
83 | 83 |
response = Twilio::TwiML::Response.new {|r| r.Say memory['pending_calls'][params['secret']], :voice => 'woman'} |
84 | 84 |
memory['pending_calls'].delete params['secret'] |
@@ -25,7 +25,7 @@ module Agents |
||
25 | 25 |
end |
26 | 26 |
|
27 | 27 |
def working? |
28 |
- event_created_within?(options['expected_update_period_in_days']) && most_recent_event.payload['success'] == true && !recent_error_logs? |
|
28 |
+ event_created_within?(options['expected_update_period_in_days']) && most_recent_event && most_recent_event.payload['success'] == true && !recent_error_logs? |
|
29 | 29 |
end |
30 | 30 |
|
31 | 31 |
def default_options |
@@ -43,10 +43,11 @@ module Agents |
||
43 | 43 |
incoming_events.each do |event| |
44 | 44 |
tweet_text = Utils.value_at(event.payload, options['message_path']) |
45 | 45 |
begin |
46 |
- publish_tweet tweet_text |
|
46 |
+ tweet = publish_tweet tweet_text |
|
47 | 47 |
create_event :payload => { |
48 | 48 |
'success' => true, |
49 | 49 |
'published_tweet' => tweet_text, |
50 |
+ 'tweet_id' => tweet.id, |
|
50 | 51 |
'agent_id' => event.agent_id, |
51 | 52 |
'event_id' => event.id |
52 | 53 |
} |
@@ -1,6 +1,7 @@ |
||
1 | 1 |
module Agents |
2 | 2 |
class WebhookAgent < Agent |
3 | 3 |
cannot_be_scheduled! |
4 |
+ cannot_receive_events! |
|
4 | 5 |
|
5 | 6 |
description do |
6 | 7 |
<<-MD |
@@ -8,7 +9,7 @@ module Agents |
||
8 | 9 |
|
9 | 10 |
In order to create events with this agent, make a POST request to: |
10 | 11 |
``` |
11 |
- https://#{ENV['DOMAIN']}/users/#{user.id}/webhooks/#{id || '<id>'}/:secret |
|
12 |
+ https://#{ENV['DOMAIN']}/users/#{user.id}/web_requests/#{id || '<id>'}/:secret |
|
12 | 13 |
``` where `:secret` is specified in your options. |
13 | 14 |
|
14 | 15 |
The |
@@ -36,8 +37,9 @@ module Agents |
||
36 | 37 |
"payload_path" => "payload"} |
37 | 38 |
end |
38 | 39 |
|
39 |
- def receive_webhook(params) |
|
40 |
+ def receive_web_request(params, method, format) |
|
40 | 41 |
secret = params.delete('secret') |
42 |
+ return ["Please use POST requests only", 401] unless method == "post" |
|
41 | 43 |
return ["Not Authorized", 401] unless secret == options['secret'] |
42 | 44 |
|
43 | 45 |
create_event(:payload => payload_for(params)) |
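The `receive_web_request(params, method, format)` hook shown above returns the response body and HTTP status (and optionally a content type). A minimal hypothetical agent using that contract, modeled on the WebhookAgent:

    def receive_web_request(params, method, format)
      return ['Please use POST requests only', 401] unless method == 'post'
      return ['Not Authorized', 401] unless params.delete('secret') == options['secret']

      create_event :payload => params
      ['Event Created', 201]
    end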
@@ -16,11 +16,13 @@ module Agents |
||
16 | 16 |
|
17 | 17 |
Specify a `url` and select a `mode` for when to create Events based on the scraped data, either `all` or `on_change`. |
18 | 18 |
|
19 |
+ `url` can be a single URL or an array of URLs (for example, for multiple pages with the exact same structure but different content to scrape). |
|
20 |
+ |
|
19 | 21 |
The `type` value can be `xml`, `html`, or `json`. |
20 | 22 |
|
21 | 23 |
To tell the Agent how to parse the content, specify `extract` as a hash with keys naming the extractions and values of hashes. |
22 | 24 |
|
23 |
- When parsing HTML or XML, these sub-hashes specify how to extract with a `css` CSS selector and either `'text': true` or `attr` pointing to an attribute name to grab. An example: |
|
25 |
+ When parsing HTML or XML, these sub-hashes specify how to extract with either a `css` CSS selector or an `xpath` XPath expression and either `'text': true` or `attr` pointing to an attribute name to grab. An example: |
|
24 | 26 |
|
25 | 27 |
'extract': { |
26 | 28 |
'url': { 'css': "#comic img", 'attr': "src" }, |
@@ -42,6 +44,8 @@ module Agents |
||
42 | 44 |
Set `expected_update_period_in_days` to the maximum amount of time that you'd expect to pass between Events being created by this Agent. This is only used to set the "working" status. |
43 | 45 |
|
44 | 46 |
Set `uniqueness_look_back` to limit the number of events checked for uniqueness (typically for performance). This defaults to the larger of #{UNIQUENESS_LOOK_BACK} or #{UNIQUENESS_FACTOR}x the number of detected received results. |
47 |
+ |
|
48 |
+ Set `force_encoding` to an encoding name if the website does not return a Content-Type header with a proper charset. |
|
45 | 49 |
MD |
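Pulling the new options together, a hedged example configuration (URLs, selectors, and encoding are placeholders) that scrapes two pages with XPath expressions and forces an encoding:

    {
      'url'  => ['http://example.com/page1', 'http://example.com/page2'],
      'type' => 'html',
      'mode' => 'on_change',
      'force_encoding' => 'ISO-8859-1',
      'extract' => {
        'title' => { 'xpath' => '//h1[@class="title"]', 'text' => true },
        'link'  => { 'xpath' => '//a[@class="more"]',   'attr' => 'href' }
      },
      'expected_update_period_in_days' => 2
    }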
46 | 50 |
|
47 | 51 |
event_description do |
@@ -59,8 +63,9 @@ module Agents |
||
59 | 63 |
'type' => "html", |
60 | 64 |
'mode' => "on_change", |
61 | 65 |
'extract' => { |
62 |
- 'url' => {'css' => "#comic img", 'attr' => "src"}, |
|
63 |
- 'title' => {'css' => "#comic img", 'attr' => "title"} |
|
66 |
+ 'url' => { 'css' => "#comic img", 'attr' => "src" }, |
|
67 |
+ 'title' => { 'css' => "#comic img", 'attr' => "alt" }, |
|
68 |
+ 'hovertext' => { 'css' => "#comic img", 'attr' => "title" } |
|
64 | 69 |
} |
65 | 70 |
} |
66 | 71 |
end |
@@ -84,6 +89,19 @@ module Agents |
||
84 | 89 |
if options['uniqueness_look_back'].present? |
85 | 90 |
errors.add(:base, "Invalid uniqueness_look_back format") unless is_positive_integer?(options['uniqueness_look_back']) |
86 | 91 |
end |
92 |
+ |
|
93 |
+ if (encoding = options['force_encoding']).present? |
|
94 |
+ case encoding |
|
95 |
+ when String |
|
96 |
+ begin |
|
97 |
+ Encoding.find(encoding) |
|
98 |
+ rescue ArgumentError |
|
99 |
+ errors.add(:base, "Unknown encoding: #{encoding.inspect}") |
|
100 |
+ end |
|
101 |
+ else |
|
102 |
+ errors.add(:base, "force_encoding must be a string") |
|
103 |
+ end |
|
104 |
+ end |
|
87 | 105 |
end |
88 | 106 |
|
89 | 107 |
def check |
@@ -91,66 +109,97 @@ module Agents |
||
91 | 109 |
log "Fetching #{options['url']}" |
92 | 110 |
request_opts = { :followlocation => true } |
93 | 111 |
request_opts[:userpwd] = options['basic_auth'] if options['basic_auth'].present? |
94 |
- request = Typhoeus::Request.new(options['url'], request_opts) |
|
95 | 112 |
|
96 |
- request.on_failure do |response| |
|
97 |
- error "Failed: #{response.inspect}" |
|
113 |
+ requests = [] |
|
114 |
+ |
|
115 |
+ if options['url'].kind_of?(Array) |
|
116 |
+ options['url'].each do |url| |
|
117 |
+ requests.push(Typhoeus::Request.new(url, request_opts)) |
|
118 |
+ end |
|
119 |
+ else |
|
120 |
+ requests.push(Typhoeus::Request.new(options['url'], request_opts)) |
|
98 | 121 |
end |
99 | 122 |
|
100 |
- request.on_success do |response| |
|
101 |
- doc = parse(response.body) |
|
123 |
+ requests.each do |request| |
|
124 |
+ request.on_failure do |response| |
|
125 |
+ error "Failed: #{response.inspect}" |
|
126 |
+ end |
|
102 | 127 |
|
103 |
- if extract_full_json? |
|
104 |
- if store_payload!(previous_payloads(1), doc) |
|
105 |
- log "Storing new result for '#{name}': #{doc.inspect}" |
|
106 |
- create_event :payload => doc |
|
128 |
+ request.on_success do |response| |
|
129 |
+ body = response.body |
|
130 |
+ if (encoding = options['force_encoding']).present? |
|
131 |
+ body = body.encode(Encoding::UTF_8, encoding) |
|
107 | 132 |
end |
108 |
- else |
|
109 |
- output = {} |
|
110 |
- options['extract'].each do |name, extraction_details| |
|
111 |
- result = if extraction_type == "json" |
|
112 |
- output[name] = Utils.values_at(doc, extraction_details['path']) |
|
113 |
- else |
|
114 |
- output[name] = doc.css(extraction_details['css']).map { |node| |
|
115 |
- if extraction_details['attr'] |
|
116 |
- node.attr(extraction_details['attr']) |
|
117 |
- elsif extraction_details['text'] |
|
118 |
- node.text() |
|
119 |
- else |
|
120 |
- error "'attr' or 'text' is required on HTML or XML extraction patterns" |
|
121 |
- return |
|
122 |
- end |
|
123 |
- } |
|
124 |
- end |
|
125 |
- log "Extracting #{extraction_type} at #{extraction_details['path'] || extraction_details['css']}: #{result}" |
|
126 |
- end |
|
127 |
- |
|
128 |
- num_unique_lengths = options['extract'].keys.map { |name| output[name].length }.uniq |
|
133 |
+ doc = parse(body) |
|
129 | 134 |
|
130 |
- if num_unique_lengths.length != 1 |
|
131 |
- error "Got an uneven number of matches for #{options['name']}: #{options['extract'].inspect}" |
|
132 |
- return |
|
133 |
- end |
|
134 |
- |
|
135 |
- old_events = previous_payloads num_unique_lengths.first |
|
136 |
- num_unique_lengths.first.times do |index| |
|
137 |
- result = {} |
|
138 |
- options['extract'].keys.each do |name| |
|
139 |
- result[name] = output[name][index] |
|
140 |
- if name.to_s == 'url' |
|
141 |
- result[name] = URI.join(options['url'], result[name]).to_s if (result[name] =~ URI::DEFAULT_PARSER.regexp[:ABS_URI]).nil? |
|
135 |
+ if extract_full_json? |
|
136 |
+ if store_payload!(previous_payloads(1), doc) |
|
137 |
+ log "Storing new result for '#{name}': #{doc.inspect}" |
|
138 |
+ create_event :payload => doc |
|
139 |
+ end |
|
140 |
+ else |
|
141 |
+ output = {} |
|
142 |
+ options['extract'].each do |name, extraction_details| |
|
143 |
+ if extraction_type == "json" |
|
144 |
+ result = Utils.values_at(doc, extraction_details['path']) |
|
145 |
+ log "Extracting #{extraction_type} at #{extraction_details['path']}: #{result}" |
|
146 |
+ else |
|
147 |
+ case |
|
148 |
+ when css = extraction_details['css'] |
|
149 |
+ nodes = doc.css(css) |
|
150 |
+ when xpath = extraction_details['xpath'] |
|
151 |
+ nodes = doc.xpath(xpath) |
|
152 |
+ else |
|
153 |
+ error "'css' or 'xpath' is required for HTML or XML extraction" |
|
154 |
+ return |
|
155 |
+ end |
|
156 |
+ unless Nokogiri::XML::NodeSet === nodes |
|
157 |
+ error "The result of HTML/XML extraction was not a NodeSet" |
|
158 |
+ return |
|
159 |
+ end |
|
160 |
+ result = nodes.map { |node| |
|
161 |
+ if extraction_details['attr'] |
|
162 |
+ node.attr(extraction_details['attr']) |
|
163 |
+ elsif extraction_details['text'] |
|
164 |
+ node.text() |
|
165 |
+ else |
|
166 |
+ error "'attr' or 'text' is required on HTML or XML extraction patterns" |
|
167 |
+ return |
|
168 |
+ end |
|
169 |
+ } |
|
170 |
+ log "Extracting #{extraction_type} at #{xpath || css}: #{result}" |
|
142 | 171 |
end |
172 |
+ output[name] = result |
|
143 | 173 |
end |
144 | 174 |
|
145 |
- if store_payload!(old_events, result) |
|
146 |
- log "Storing new parsed result for '#{name}': #{result.inspect}" |
|
147 |
- create_event :payload => result |
|
175 |
+ num_unique_lengths = options['extract'].keys.map { |name| output[name].length }.uniq |
|
176 |
+ |
|
177 |
+ if num_unique_lengths.length != 1 |
|
178 |
+ error "Got an uneven number of matches for #{options['name']}: #{options['extract'].inspect}" |
|
179 |
+ return |
|
180 |
+ end |
|
181 |
+ |
|
182 |
+ old_events = previous_payloads num_unique_lengths.first |
|
183 |
+ num_unique_lengths.first.times do |index| |
|
184 |
+ result = {} |
|
185 |
+ options['extract'].keys.each do |name| |
|
186 |
+ result[name] = output[name][index] |
|
187 |
+ if name.to_s == 'url' |
|
188 |
+ result[name] = URI.join(options['url'], result[name]).to_s if (result[name] =~ URI::DEFAULT_PARSER.regexp[:ABS_URI]).nil? |
|
189 |
+ end |
|
190 |
+ end |
|
191 |
+ |
|
192 |
+ if store_payload!(old_events, result) |
|
193 |
+ log "Storing new parsed result for '#{name}': #{result.inspect}" |
|
194 |
+ create_event :payload => result |
|
195 |
+ end |
|
148 | 196 |
end |
149 | 197 |
end |
150 | 198 |
end |
199 |
+ |
|
200 |
+ hydra.queue request |
|
201 |
+ hydra.run |
|
151 | 202 |
end |
152 |
- hydra.queue request |
|
153 |
- hydra.run |
|
154 | 203 |
end |
155 | 204 |
|
156 | 205 |
private |
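The multi-URL support above queues one Typhoeus request per URL on the hydra and runs them together. A tiny standalone sketch of that pattern (URLs are placeholders):

    require 'typhoeus'

    hydra = Typhoeus::Hydra.new

    ['http://example.com/a', 'http://example.com/b'].each do |url|
      request = Typhoeus::Request.new(url, :followlocation => true)
      request.on_success { |response| puts "#{url}: #{response.body.bytesize} bytes" }
      request.on_failure { |response| warn "#{url} failed with code #{response.code}" }
      hydra.queue request
    end

    hydra.run  # blocks until every queued request has completed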
@@ -227,4 +276,4 @@ module Agents |
||
227 | 276 |
end |
228 | 277 |
end |
229 | 278 |
end |
230 |
-end |
|
279 |
+end |
@@ -0,0 +1,12 @@ |
||
1 |
+<p> |
|
2 |
+ Data for this Agent is available at these URLs: |
|
3 |
+</p> |
|
4 |
+ |
|
5 |
+ |
|
6 |
+<ul> |
|
7 |
+ <% @agent.options['secrets'].each do |secret| %> |
|
8 |
+ <% url = lambda { |format| web_requests_url(:agent_id => @agent.id, :user_id => current_user.id, :secret => secret, :format => format) } %> |
|
9 |
+ <li><%= link_to url.call(:json), url.call(:json), :target => :blank %></li> |
|
10 |
+ <li><%= link_to url.call(:xml), url.call(:xml), :target => :blank %></li> |
|
11 |
+ <% end %> |
|
12 |
+</ul> |
@@ -0,0 +1,3 @@ |
||
1 |
+<p> |
|
2 |
+ Send WebHooks (POST requests) to this Agent at <%= link_to web_requests_url(:agent_id => @agent.id, :user_id => current_user.id, :secret => @agent.options['secret']), web_requests_url(:agent_id => @agent.id, :user_id => current_user.id, :secret => @agent.options['secret']), :target => :blank %> |
|
3 |
+</p> |
@@ -19,8 +19,7 @@ |
||
19 | 19 |
end |
20 | 20 |
dot_format_string = dot_format_string + "}" |
21 | 21 |
%> |
22 |
- |
|
23 |
- <img src="https://chart.googleapis.com/chart?cht=gv&chl=<%= CGI::escape dot_format_string %>" /> |
|
22 |
+ <%= render_dot(dot_format_string) %> |
|
24 | 23 |
</div> |
25 | 24 |
</div> |
26 | 25 |
</div> |
@@ -17,9 +17,15 @@ |
||
17 | 17 |
</form> |
18 | 18 |
|
19 | 19 |
<li id='job-indicator'> |
20 |
- <a href="/delayed_job"> |
|
21 |
- <span class="badge"><i class="icon-refresh icon-white"></i> <span class='number'>0</span></span> |
|
22 |
- </a> |
|
20 |
+ <% if defined?(DelayedJobWeb) %> |
|
21 |
+ <a href="/delayed_job"> |
|
22 |
+ <span class="badge"><i class="icon-refresh icon-white"></i> <span class='number'>0</span></span> |
|
23 |
+ </a> |
|
24 |
+ <% else %> |
|
25 |
+ <a href="#" onclick='return false;'> |
|
26 |
+ <span class="badge"><i class="icon-refresh icon-white"></i> <span class='number'>0</span></span> |
|
27 |
+ </a> |
|
28 |
+ <% end %> |
|
23 | 29 |
</li> |
24 | 30 |
|
25 | 31 |
<li id='event-indicator'> |
@@ -64,7 +64,7 @@ class HuginnScheduler |
||
64 | 64 |
|
65 | 65 |
# Schedule repeating events. |
66 | 66 |
|
67 |
- %w[2m 5m 10m 30m 1h 2h 5h 12h 1d 2d 7d].each do |schedule| |
|
67 |
+ %w[1m 2m 5m 10m 30m 1h 2h 5h 12h 1d 2d 7d].each do |schedule| |
|
68 | 68 |
rufus_scheduler.every schedule do |
69 | 69 |
run_schedule "every_#{schedule}" |
70 | 70 |
end |
@@ -2,6 +2,8 @@ |
||
2 | 2 |
|
3 | 3 |
require ::File.expand_path('../config/environment', __FILE__) |
4 | 4 |
|
5 |
+# To enable DelayedJobWeb, see the 'Enable DelayedJobWeb' section of the README. |
|
6 |
+ |
|
5 | 7 |
# if Rails.env.production? |
6 | 8 |
# DelayedJobWeb.use Rack::Auth::Basic do |username, password| |
7 | 9 |
# username == 'admin' && password == 'password' |
@@ -32,9 +32,13 @@ Huginn::Application.routes.draw do |
||
32 | 32 |
match "/worker_status" => "worker_status#show" |
33 | 33 |
|
34 | 34 |
post "/users/:user_id/update_location/:secret" => "user_location_updates#create" |
35 |
- post "/users/:user_id/webhooks/:agent_id/:secret" => "webhooks#create" |
|
36 | 35 |
|
36 |
+ match "/users/:user_id/web_requests/:agent_id/:secret" => "web_requests#handle_request", :as => :web_requests |
|
37 |
+ post "/users/:user_id/webhooks/:agent_id/:secret" => "web_requests#handle_request" # legacy |
|
38 |
+ |
|
39 |
+# To enable DelayedJobWeb, see the 'Enable DelayedJobWeb' section of the README. |
|
37 | 40 |
# match "/delayed_job" => DelayedJobWeb, :anchor => false |
41 |
+ |
|
38 | 42 |
devise_for :users, :sign_out_via => [ :post, :delete ] |
39 | 43 |
|
40 | 44 |
match "/about" => "home#about" |
@@ -0,0 +1,9 @@ |
||
1 |
+class RenameWebhookToWebRequest < ActiveRecord::Migration |
|
2 |
+ def up |
|
3 |
+ rename_column :agents, :last_webhook_at, :last_web_request_at |
|
4 |
+ end |
|
5 |
+ |
|
6 |
+ def down |
|
7 |
+ rename_column :agents, :last_web_request_at, :last_webhook_at |
|
8 |
+ end |
|
9 |
+end |
@@ -11,7 +11,7 @@ |
||
11 | 11 |
# |
12 | 12 |
# It's strongly recommended to check this file into your version control system. |
13 | 13 |
|
14 |
-ActiveRecord::Schema.define(:version => 20140403043556) do |
|
14 |
+ActiveRecord::Schema.define(:version => 20140408150825) do |
|
15 | 15 |
|
16 | 16 |
create_table "agent_logs", :force => true do |t| |
17 | 17 |
t.integer "agent_id", :null => false |
@@ -36,7 +36,7 @@ ActiveRecord::Schema.define(:version => 20140403043556) do |
||
36 | 36 |
t.datetime "created_at", :null => false |
37 | 37 |
t.datetime "updated_at", :null => false |
38 | 38 |
t.text "memory", :limit => 2147483647 |
39 |
- t.datetime "last_webhook_at" |
|
39 |
+ t.datetime "last_web_request_at" |
|
40 | 40 |
t.integer "keep_events_for", :default => 0, :null => false |
41 | 41 |
t.datetime "last_event_at" |
42 | 42 |
t.datetime "last_error_log_at" |
@@ -32,7 +32,8 @@ unless user.agents.where(:name => "XKCD Source").exists? |
||
32 | 32 |
'expected_update_period_in_days' => 5, |
33 | 33 |
'extract' => { |
34 | 34 |
'url' => { 'css' => "#comic img", 'attr' => "src" }, |
35 |
- 'title' => { 'css' => "#comic img", 'attr' => "title" } |
|
35 |
+ 'title' => { 'css' => "#comic img", 'attr' => "alt" }, |
|
36 |
+ 'hovertext' => { 'css' => "#comic img", 'attr' => "title" } |
|
36 | 37 |
} |
37 | 38 |
}).save! |
38 | 39 |
end |
@@ -15,6 +15,16 @@ Vagrant.configure("2") do |config| |
||
15 | 15 |
end |
16 | 16 |
end |
17 | 17 |
|
18 |
+ config.vm.define :prl do |prl| |
|
19 |
+ prl.vm.box = "parallels/ubuntu-12.04" |
|
20 |
+ |
|
21 |
+ prl.vm.provision :chef_solo do |chef| |
|
22 |
+ chef.roles_path = "roles" |
|
23 |
+ chef.cookbooks_path = ["cookbooks", "site-cookbooks"] |
|
24 |
+ chef.add_role("huginn_development") |
|
25 |
+ end |
|
26 |
+ end |
|
27 |
+ |
|
18 | 28 |
config.vm.define :ec2 do |ec2| |
19 | 29 |
ec2.vm.box = "dummy" |
20 | 30 |
ec2.vm.box_url = "https://github.com/mitchellh/vagrant-aws/raw/master/dummy.box" |
@@ -16,12 +16,19 @@ group "huginn" do |
||
16 | 16 |
action :create |
17 | 17 |
end |
18 | 18 |
|
19 |
-%w("ruby1.9.1" "ruby1.9.1-dev" "libxslt-dev" "libxml2-dev" "curl").each do |pkg| |
|
19 |
+%w("ruby1.9.1" "ruby1.9.1-dev" "libxslt-dev" "libxml2-dev" "curl" "libmysqlclient-dev" "rubygems").each do |pkg| |
|
20 | 20 |
package pkg do |
21 | 21 |
action :install |
22 | 22 |
end |
23 | 23 |
end |
24 | 24 |
|
25 |
+bash "Setting default ruby version to 1.9" do |
|
26 |
+ code <<-EOH |
|
27 |
+ update-alternatives --set ruby /usr/bin/ruby1.9.1 |
|
28 |
+ update-alternatives --set gem /usr/bin/gem1.9.1 |
|
29 |
+ EOH |
|
30 |
+end |
|
31 |
+ |
|
25 | 32 |
git "/home/huginn/huginn" do |
26 | 33 |
repository 'git://github.com/cantino/huginn.git' |
27 | 34 |
reference 'master' |
@@ -49,9 +56,9 @@ bash "huginn dependencies" do |
||
49 | 56 |
export LC_ALL="en_US.UTF-8" |
50 | 57 |
sudo bundle install |
51 | 58 |
sed s/REPLACE_ME_NOW\!/$(sudo rake secret)/ .env.example > .env |
52 |
- sudo rake db:create |
|
53 |
- sudo rake db:migrate |
|
54 |
- sudo rake db:seed |
|
59 |
+ sudo bundle exec rake db:create |
|
60 |
+ sudo bundle exec rake db:migrate |
|
61 |
+ sudo bundle exec rake db:seed |
|
55 | 62 |
EOH |
56 | 63 |
end |
57 | 64 |
|
@@ -59,6 +66,6 @@ bash "huginn has been installed and will start in a minute" do |
||
59 | 66 |
user "huginn" |
60 | 67 |
cwd "/home/huginn/huginn" |
61 | 68 |
code <<-EOH |
62 |
- sudo foreman start |
|
69 |
+ sudo nohup foreman start & |
|
63 | 70 |
EOH |
64 | 71 |
end |
@@ -14,7 +14,7 @@ group "huginn" do |
||
14 | 14 |
members ["huginn"] |
15 | 15 |
end |
16 | 16 |
|
17 |
-%w("ruby1.9.1" "ruby1.9.1-dev" "libxslt-dev" "libxml2-dev" "curl" "libshadow-ruby1.8").each do |pkg| |
|
17 |
+%w("ruby1.9.1" "ruby1.9.1-dev" "libxslt-dev" "libxml2-dev" "curl" "libshadow-ruby1.8" "libmysqlclient-dev").each do |pkg| |
|
18 | 18 |
package("#{pkg}") |
19 | 19 |
end |
20 | 20 |
|
@@ -84,9 +84,9 @@ deploy "/home/huginn" do |
||
84 | 84 |
sudo cp /home/huginn/shared/config/nginx.conf /etc/nginx/ |
85 | 85 |
sudo bundle install |
86 | 86 |
sed -i s/REPLACE_ME_NOW\!/$(sudo rake secret)/ .env |
87 |
- sudo rake db:create |
|
88 |
- sudo rake db:migrate |
|
89 |
- sudo rake db:seed |
|
87 |
+ sudo bundle exec rake db:create |
|
88 |
+ sudo bundle exec rake db:migrate |
|
89 |
+ sudo bundle exec rake db:seed |
|
90 | 90 |
sudo foreman export upstart /etc/init -a huginn -u huginn -l log |
91 | 91 |
sudo start huginn |
92 | 92 |
EOH |
@@ -21,20 +21,24 @@ module Utils |
||
21 | 21 |
end |
22 | 22 |
end |
23 | 23 |
|
24 |
- def self.interpolate_jsonpaths(value, data) |
|
25 |
- value.gsub(/<[^>]+>/).each { |jsonpath| |
|
26 |
- Utils.values_at(data, jsonpath[1..-2]).first.to_s |
|
27 |
- } |
|
24 |
+ def self.interpolate_jsonpaths(value, data, options = {}) |
|
25 |
+ if options[:leading_dollarsign_is_jsonpath] && value[0] == '$' |
|
26 |
+ Utils.values_at(data, value).first.to_s |
|
27 |
+ else |
|
28 |
+ value.gsub(/<[^>]+>/).each { |jsonpath| |
|
29 |
+ Utils.values_at(data, jsonpath[1..-2]).first.to_s |
|
30 |
+ } |
|
31 |
+ end |
|
28 | 32 |
end |
29 | 33 |
|
30 |
- def self.recursively_interpolate_jsonpaths(struct, data) |
|
34 |
+ def self.recursively_interpolate_jsonpaths(struct, data, options = {}) |
|
31 | 35 |
case struct |
32 | 36 |
when Hash |
33 |
- struct.inject({}) {|memo, (key, value)| memo[key] = recursively_interpolate_jsonpaths(value, data); memo } |
|
37 |
+ struct.inject({}) {|memo, (key, value)| memo[key] = recursively_interpolate_jsonpaths(value, data, options); memo } |
|
34 | 38 |
when Array |
35 |
- struct.map {|elem| recursively_interpolate_jsonpaths(elem, data) } |
|
39 |
+ struct.map {|elem| recursively_interpolate_jsonpaths(elem, data, options) } |
|
36 | 40 |
when String |
37 |
- interpolate_jsonpaths(struct, data) |
|
41 |
+ interpolate_jsonpaths(struct, data, options) |
|
38 | 42 |
else |
39 | 43 |
struct |
40 | 44 |
end |
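To illustrate the new `:leading_dollarsign_is_jsonpath` option (this mirrors the spec added further below; the payload is illustrative):

    payload = { 'there' => { 'world' => 'WORLD' } }

    Utils.interpolate_jsonpaths('hello <$.there.world>', payload)
    # => "hello WORLD"

    Utils.interpolate_jsonpaths('$.there.world', payload,
                                :leading_dollarsign_is_jsonpath => true)
    # => "WORLD"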
@@ -52,7 +56,7 @@ module Utils |
||
52 | 56 |
escape = false |
53 | 57 |
end |
54 | 58 |
|
55 |
- result = JsonPath.new(path, :allow_eval => false).on(data.is_a?(String) ? data : data.to_json) |
|
59 |
+ result = JsonPath.new(path, :allow_eval => ENV['ALLOW_JSONPATH_EVAL'] == "true").on(data.is_a?(String) ? data : data.to_json) |
|
56 | 60 |
if escape |
57 | 61 |
result.map {|r| CGI::escape r } |
58 | 62 |
else |
@@ -75,4 +79,4 @@ module Utils |
||
75 | 79 |
def self.pretty_jsonify(thing) |
76 | 80 |
JSON.pretty_generate(thing).gsub('</', '<\/') |
77 | 81 |
end |
78 |
-end |
|
82 |
+end |
@@ -0,0 +1,97 @@ |
||
1 |
+require 'spec_helper' |
|
2 |
+ |
|
3 |
+describe WebRequestsController do |
|
4 |
+ class Agents::WebRequestReceiverAgent < Agent |
|
5 |
+ cannot_receive_events! |
|
6 |
+ cannot_be_scheduled! |
|
7 |
+ |
|
8 |
+ def receive_web_request(params, method, format) |
|
9 |
+ if params.delete(:secret) == options[:secret] |
|
10 |
+ memory[:web_request_values] = params |
|
11 |
+ memory[:web_request_format] = format |
|
12 |
+ memory[:web_request_method] = method |
|
13 |
+ ["success", 200, memory['content_type']] |
|
14 |
+ else |
|
15 |
+ ["failure", 404] |
|
16 |
+ end |
|
17 |
+ end |
|
18 |
+ end |
|
19 |
+ |
|
20 |
+ before do |
|
21 |
+ stub(Agents::WebRequestReceiverAgent).valid_type?("Agents::WebRequestReceiverAgent") { true } |
|
22 |
+ @agent = Agents::WebRequestReceiverAgent.new(:name => "something", :options => { :secret => "my_secret" }) |
|
23 |
+ @agent.user = users(:bob) |
|
24 |
+ @agent.save! |
|
25 |
+ end |
|
26 |
+ |
|
27 |
+ it "should not require login to receive a web request" do |
|
28 |
+ @agent.last_web_request_at.should be_nil |
|
29 |
+ post :handle_request, :user_id => users(:bob).to_param, :agent_id => @agent.id, :secret => "my_secret", :key => "value", :another_key => "5" |
|
30 |
+ @agent.reload.last_web_request_at.should be_within(2).of(Time.now) |
|
31 |
+ response.body.should == "success" |
|
32 |
+ response.should be_success |
|
33 |
+ end |
|
34 |
+ |
|
35 |
+ it "should call receive_web_request" do |
|
36 |
+ post :handle_request, :user_id => users(:bob).to_param, :agent_id => @agent.id, :secret => "my_secret", :key => "value", :another_key => "5" |
|
37 |
+ @agent.reload |
|
38 |
+ @agent.memory[:web_request_values].should == { 'key' => "value", 'another_key' => "5" } |
|
39 |
+ @agent.memory[:web_request_format].should == "text/html" |
|
40 |
+ @agent.memory[:web_request_method].should == "post" |
|
41 |
+ response.body.should == "success" |
|
42 |
+ response.headers['Content-Type'].should == 'text/plain; charset=utf-8' |
|
43 |
+ response.should be_success |
|
44 |
+ |
|
45 |
+ post :handle_request, :user_id => users(:bob).to_param, :agent_id => @agent.id, :secret => "not_my_secret", :no => "go" |
|
46 |
+ @agent.reload.memory[:web_request_values].should_not == { 'no' => "go" } |
|
47 |
+ response.body.should == "failure" |
|
48 |
+ response.should be_missing |
|
49 |
+ end |
|
50 |
+ |
|
51 |
+ it "should accept gets" do |
|
52 |
+ get :handle_request, :user_id => users(:bob).to_param, :agent_id => @agent.id, :secret => "my_secret", :key => "value", :another_key => "5" |
|
53 |
+ @agent.reload |
|
54 |
+ @agent.memory[:web_request_values].should == { 'key' => "value", 'another_key' => "5" } |
|
55 |
+ @agent.memory[:web_request_format].should == "text/html" |
|
56 |
+ @agent.memory[:web_request_method].should == "get" |
|
57 |
+ response.body.should == "success" |
|
58 |
+ response.should be_success |
|
59 |
+ end |
|
60 |
+ |
|
61 |
+ it "should pass through the received format" do |
|
62 |
+ get :handle_request, :user_id => users(:bob).to_param, :agent_id => @agent.id, :secret => "my_secret", :key => "value", :another_key => "5", :format => :json |
|
63 |
+ @agent.reload |
|
64 |
+ @agent.memory[:web_request_values].should == { 'key' => "value", 'another_key' => "5" } |
|
65 |
+ @agent.memory[:web_request_format].should == "application/json" |
|
66 |
+ @agent.memory[:web_request_method].should == "get" |
|
67 |
+ |
|
68 |
+ post :handle_request, :user_id => users(:bob).to_param, :agent_id => @agent.id, :secret => "my_secret", :key => "value", :another_key => "5", :format => :xml |
|
69 |
+ @agent.reload |
|
70 |
+ @agent.memory[:web_request_values].should == { 'key' => "value", 'another_key' => "5" } |
|
71 |
+ @agent.memory[:web_request_format].should == "application/xml" |
|
72 |
+ @agent.memory[:web_request_method].should == "post" |
|
73 |
+ |
|
74 |
+ put :handle_request, :user_id => users(:bob).to_param, :agent_id => @agent.id, :secret => "my_secret", :key => "value", :another_key => "5", :format => :atom |
|
75 |
+ @agent.reload |
|
76 |
+ @agent.memory[:web_request_values].should == { 'key' => "value", 'another_key' => "5" } |
|
77 |
+ @agent.memory[:web_request_format].should == "application/atom+xml" |
|
78 |
+ @agent.memory[:web_request_method].should == "put" |
|
79 |
+ end |
|
80 |
+ |
|
81 |
+ it "can accept a content-type to return" do |
|
82 |
+ @agent.memory['content_type'] = 'application/json' |
|
83 |
+ @agent.save! |
|
84 |
+ get :handle_request, :user_id => users(:bob).to_param, :agent_id => @agent.id, :secret => "my_secret", :key => "value", :another_key => "5" |
|
85 |
+ response.headers['Content-Type'].should == 'application/json; charset=utf-8' |
|
86 |
+ end |
|
87 |
+ |
|
88 |
+ it "should fail on incorrect users" do |
|
89 |
+ post :handle_request, :user_id => users(:jane).to_param, :agent_id => @agent.id, :secret => "my_secret", :no => "go" |
|
90 |
+ response.should be_missing |
|
91 |
+ end |
|
92 |
+ |
|
93 |
+ it "should fail on incorrect agents" do |
|
94 |
+ post :handle_request, :user_id => users(:bob).to_param, :agent_id => 454545, :secret => "my_secret", :no => "go" |
|
95 |
+ response.should be_missing |
|
96 |
+ end |
|
97 |
+end |
@@ -1,54 +0,0 @@ |
||
1 |
-require 'spec_helper' |
|
2 |
- |
|
3 |
-describe WebhooksController do |
|
4 |
- class Agents::WebhookReceiverAgent < Agent |
|
5 |
- cannot_receive_events! |
|
6 |
- cannot_be_scheduled! |
|
7 |
- |
|
8 |
- def receive_webhook(params) |
|
9 |
- if params.delete(:secret) == options[:secret] |
|
10 |
- memory[:webhook_values] = params |
|
11 |
- ["success", 200] |
|
12 |
- else |
|
13 |
- ["failure", 404] |
|
14 |
- end |
|
15 |
- end |
|
16 |
- end |
|
17 |
- |
|
18 |
- before do |
|
19 |
- stub(Agents::WebhookReceiverAgent).valid_type?("Agents::WebhookReceiverAgent") { true } |
|
20 |
- @agent = Agents::WebhookReceiverAgent.new(:name => "something", :options => { :secret => "my_secret" }) |
|
21 |
- @agent.user = users(:bob) |
|
22 |
- @agent.save! |
|
23 |
- end |
|
24 |
- |
|
25 |
- it "should not require login to trigger a webhook" do |
|
26 |
- @agent.last_webhook_at.should be_nil |
|
27 |
- post :create, :user_id => users(:bob).to_param, :agent_id => @agent.id, :secret => "my_secret", :key => "value", :another_key => "5" |
|
28 |
- @agent.reload.last_webhook_at.should be_within(2).of(Time.now) |
|
29 |
- response.body.should == "success" |
|
30 |
- response.should be_success |
|
31 |
- end |
|
32 |
- |
|
33 |
- it "should call receive_webhook" do |
|
34 |
- post :create, :user_id => users(:bob).to_param, :agent_id => @agent.id, :secret => "my_secret", :key => "value", :another_key => "5" |
|
35 |
- @agent.reload.memory[:webhook_values].should == { 'key' => "value", 'another_key' => "5" } |
|
36 |
- response.body.should == "success" |
|
37 |
- response.should be_success |
|
38 |
- |
|
39 |
- post :create, :user_id => users(:bob).to_param, :agent_id => @agent.id, :secret => "not_my_secret", :no => "go" |
|
40 |
- @agent.reload.memory[:webhook_values].should_not == { 'no' => "go" } |
|
41 |
- response.body.should == "failure" |
|
42 |
- response.should be_missing |
|
43 |
- end |
|
44 |
- |
|
45 |
- it "should fail on incorrect users" do |
|
46 |
- post :create, :user_id => users(:jane).to_param, :agent_id => @agent.id, :secret => "my_secret", :no => "go" |
|
47 |
- response.should be_missing |
|
48 |
- end |
|
49 |
- |
|
50 |
- it "should fail on incorrect agents" do |
|
51 |
- post :create, :user_id => users(:bob).to_param, :agent_id => 454545, :secret => "my_secret", :no => "go" |
|
52 |
- response.should be_missing |
|
53 |
- end |
|
54 |
-end |
@@ -0,0 +1,21 @@ |
||
1 |
+[ |
|
2 |
+ { |
|
3 |
+ "creator": { |
|
4 |
+ "fullsize_avatar_url": "https://dge9rmgqjs8m1.cloudfront.net/global/dfsdfsdfdsf/original.gif?r=3", |
|
5 |
+ "avatar_url": "http://dge9rmgqjs8m1.cloudfront.net/global/dfsdfsdfdsf/avatar.gif?r=3", |
|
6 |
+ "name": "Dominik Sander", |
|
7 |
+ "id": 123456 |
|
8 |
+ }, |
|
9 |
+ "attachments": [], |
|
10 |
+ "raw_excerpt": "test test", |
|
11 |
+ "excerpt": "test test", |
|
12 |
+ "id": 6454342343, |
|
13 |
+ "created_at": "2014-04-17T10:25:31.000+02:00", |
|
14 |
+ "updated_at": "2014-04-17T10:25:31.000+02:00", |
|
15 |
+ "summary": "commented on whaat", |
|
16 |
+ "action": "commented on", |
|
17 |
+ "target": "whaat", |
|
18 |
+ "url": "https://basecamp.com/12456/api/v1/projects/5476464-explore-basecamp/messages/24598238-whaat.json", |
|
19 |
+ "html_url": "https://basecamp.com/12456/projects/5476464-explore-basecamp/messages/24598238-whaat#comment_150756301" |
|
20 |
+ } |
|
21 |
+] |
@@ -28,8 +28,15 @@ describe Utils do |
||
28 | 28 |
end |
29 | 29 |
|
30 | 30 |
describe "#interpolate_jsonpaths" do |
31 |
+ let(:payload) { { :there => { :world => "WORLD" }, :works => "should work" } } |
|
32 |
+ |
|
31 | 33 |
it "interpolates jsonpath expressions between matching <>'s" do |
32 |
- Utils.interpolate_jsonpaths("hello <$.there.world> this <escape works>", { :there => { :world => "WORLD" }, :works => "should work" }).should == "hello WORLD this should+work" |
|
34 |
+ Utils.interpolate_jsonpaths("hello <$.there.world> this <escape works>", payload).should == "hello WORLD this should+work" |
|
35 |
+ end |
|
36 |
+ |
|
37 |
+ it "optionally supports treating values that start with '$' as raw JSONPath" do |
|
38 |
+ Utils.interpolate_jsonpaths("$.there.world", payload).should == "$.there.world" |
|
39 |
+ Utils.interpolate_jsonpaths("$.there.world", payload, :leading_dollarsign_is_jsonpath => true).should == "WORLD" |
|
33 | 40 |
end |
34 | 41 |
end |
35 | 42 |
|
@@ -514,7 +514,55 @@ describe Agent do |
||
514 | 514 |
end |
515 | 515 |
end |
516 | 516 |
end |
517 |
+ end |
|
518 |
+ |
|
519 |
+ describe ".trigger_web_request" do |
|
520 |
+ class Agents::WebRequestReceiver < Agent |
|
521 |
+ cannot_be_scheduled! |
|
522 |
+ end |
|
523 |
+ |
|
524 |
+ before do |
|
525 |
+ stub(Agents::WebRequestReceiver).valid_type?("Agents::WebRequestReceiver") { true } |
|
526 |
+ end |
|
527 |
+ |
|
528 |
+ context "when .receive_web_request is defined" do |
|
529 |
+ before do |
|
530 |
+ @agent = Agents::WebRequestReceiver.new(:name => "something") |
|
531 |
+ @agent.user = users(:bob) |
|
532 |
+ @agent.save! |
|
533 |
+ |
|
534 |
+ def @agent.receive_web_request(params, method, format) |
|
535 |
+ memory['last_request'] = [params, method, format] |
|
536 |
+ ['Ok!', 200] |
|
537 |
+ end |
|
538 |
+ end |
|
539 |
+ |
|
540 |
+ it "calls the .receive_web_request hook, updates last_web_request_at, and saves" do |
|
541 |
+ @agent.trigger_web_request({ :some_param => "some_value" }, "post", "text/html") |
|
542 |
+ @agent.reload.memory['last_request'].should == [ { "some_param" => "some_value" }, "post", "text/html" ] |
|
543 |
+ @agent.last_web_request_at.to_i.should be_within(1).of(Time.now.to_i) |
|
544 |
+ end |
|
545 |
+ end |
|
517 | 546 |
|
547 |
+ context "when .receive_webhook is defined" do |
|
548 |
+ before do |
|
549 |
+ @agent = Agents::WebRequestReceiver.new(:name => "something") |
|
550 |
+ @agent.user = users(:bob) |
|
551 |
+ @agent.save! |
|
552 |
+ |
|
553 |
+ def @agent.receive_webhook(params) |
|
554 |
+ memory['last_webhook_request'] = params |
|
555 |
+ ['Ok!', 200] |
|
556 |
+ end |
|
557 |
+ end |
|
558 |
+ |
|
559 |
+ it "outputs a deprecation warning and calls .receive_webhook with the params" do |
|
560 |
+ mock(Rails.logger).warn("DEPRECATED: The .receive_webhook method is deprecated, please switch your Agent to use .receive_web_request.") |
|
561 |
+ @agent.trigger_web_request({ :some_param => "some_value" }, "post", "text/html") |
|
562 |
+ @agent.reload.memory['last_webhook_request'].should == { "some_param" => "some_value" } |
|
563 |
+ @agent.last_web_request_at.to_i.should be_within(1).of(Time.now.to_i) |
|
564 |
+ end |
|
565 |
+ end |
|
518 | 566 |
end |
519 | 567 |
|
520 | 568 |
describe "recent_error_logs?" do |
@@ -0,0 +1,85 @@ |
||
1 |
+require 'spec_helper' |
|
2 |
+ |
|
3 |
+describe Agents::BasecampAgent do |
|
4 |
+ before(:each) do |
|
5 |
+ stub_request(:get, /json$/).to_return(:body => File.read(Rails.root.join("spec/data_fixtures/basecamp.json")), :status => 200, :headers => {"Content-Type" => "text/json"}) |
|
6 |
+ stub_request(:get, /Z$/).to_return(:body => File.read(Rails.root.join("spec/data_fixtures/basecamp.json")), :status => 200, :headers => {"Content-Type" => "text/json"}) |
|
7 |
+ @valid_params = { |
|
8 |
+ :username => "user", |
|
9 |
+ :password => "pass", |
|
10 |
+ :user_id => 12345, |
|
11 |
+ :project_id => 6789, |
|
12 |
+ } |
|
13 |
+ |
|
14 |
+ @checker = Agents::BasecampAgent.new(:name => "somename", :options => @valid_params) |
|
15 |
+ @checker.user = users(:jane) |
|
16 |
+ @checker.save! |
|
17 |
+ end |
|
18 |
+ |
|
19 |
+ describe "validating" do |
|
20 |
+ before do |
|
21 |
+ @checker.should be_valid |
|
22 |
+ end |
|
23 |
+ |
|
24 |
+ it "should require the basecamp username" do |
|
25 |
+ @checker.options['username'] = nil |
|
26 |
+ @checker.should_not be_valid |
|
27 |
+ end |
|
28 |
+ |
|
29 |
+ it "should require the basecamp password" do |
|
30 |
+ @checker.options['password'] = nil |
|
31 |
+ @checker.should_not be_valid |
|
32 |
+ end |
|
33 |
+ |
|
34 |
+ it "should require the basecamp user_id" do |
|
35 |
+ @checker.options['user_id'] = nil |
|
36 |
+ @checker.should_not be_valid |
|
37 |
+ end |
|
38 |
+ |
|
39 |
+ it "should require the basecamp project_id" do |
|
40 |
+ @checker.options['project_id'] = nil |
|
41 |
+ @checker.should_not be_valid |
|
42 |
+ end |
|
43 |
+ |
|
44 |
+ end |
|
45 |
+ |
|
46 |
+ describe "helpers" do |
|
47 |
+ it "should generate a correct request options hash" do |
|
48 |
+ @checker.send(:request_options).should == {:basic_auth=>{:username=>"user", :password=>"pass"}, :headers => {"User-Agent" => "Huginn (https://github.com/cantino/huginn)"}} |
|
49 |
+ end |
|
50 |
+ |
|
51 |
+ it "should generate the currect request url" do |
|
52 |
+ @checker.send(:request_url).should == "https://basecamp.com/12345/api/v1/projects/6789/events.json" |
|
53 |
+ end |
|
54 |
+ |
|
55 |
+ |
|
56 |
+ it "should not provide the since attribute on first run" do |
|
57 |
+ @checker.send(:query_parameters).should == {} |
|
58 |
+ end |
|
59 |
+ |
|
60 |
+ it "should provide the since attribute after the first run" do |
|
61 |
+ time = (Time.now-1.minute).iso8601 |
|
62 |
+ @checker.memory[:last_run] = time |
|
63 |
+ @checker.save |
|
64 |
+ @checker.reload.send(:query_parameters).should == {:query => {:since => time}} |
|
65 |
+ end |
|
66 |
+ end |
|
67 |
+ describe "#check" do |
|
68 |
+ it "should not emit events on its first run" do |
|
69 |
+ expect { @checker.check }.to change { Event.count }.by(0) |
|
70 |
+ end |
|
71 |
+ it "should check that initial run creates an event" do |
|
72 |
+ @checker.last_check_at = Time.now - 1.minute |
|
73 |
+ expect { @checker.check }.to change { Event.count }.by(1) |
|
74 |
+ end |
|
75 |
+ end |
|
76 |
+ |
|
77 |
+ describe "#working?" do |
|
78 |
+ it "it is working when at least one event was emited" do |
|
79 |
+ @checker.should_not be_working |
|
80 |
+ @checker.last_check_at = Time.now - 1.minute |
|
81 |
+ @checker.check |
|
82 |
+ @checker.reload.should be_working |
|
83 |
+ end |
|
84 |
+ end |
|
85 |
+end |
@@ -0,0 +1,171 @@ |
||
1 |
+# encoding: utf-8 |
|
2 |
+ |
|
3 |
+require 'spec_helper' |
|
4 |
+ |
|
5 |
+describe Agents::DataOutputAgent do |
|
6 |
+ let(:agent) do |
|
7 |
+ _agent = Agents::DataOutputAgent.new(:name => 'My Data Output Agent') |
|
8 |
+ _agent.options = _agent.default_options.merge('secrets' => ['secret1', 'secret2'], 'events_to_show' => 2) |
|
9 |
+ _agent.user = users(:bob) |
|
10 |
+ _agent.sources << agents(:bob_website_agent) |
|
11 |
+ _agent.save! |
|
12 |
+ _agent |
|
13 |
+ end |
|
14 |
+ |
|
15 |
+ describe "#working?" do |
|
16 |
+ it "checks if events have been received within expected receive period" do |
|
17 |
+ agent.should_not be_working |
|
18 |
+ Agents::DataOutputAgent.async_receive agent.id, [events(:bob_website_agent_event).id] |
|
19 |
+ agent.reload.should be_working |
|
20 |
+ two_days_from_now = 2.days.from_now |
|
21 |
+ stub(Time).now { two_days_from_now } |
|
22 |
+ agent.reload.should_not be_working |
|
23 |
+ end |
|
24 |
+ end |
|
25 |
+ |
|
26 |
+ describe "validation" do |
|
27 |
+ before do |
|
28 |
+ agent.should be_valid |
|
29 |
+ end |
|
30 |
+ |
|
31 |
+ it "should validate presence and length of secrets" do |
|
32 |
+ agent.options[:secrets] = "" |
|
33 |
+ agent.should_not be_valid |
|
34 |
+ agent.options[:secrets] = "foo" |
|
35 |
+ agent.should_not be_valid |
|
36 |
+ agent.options[:secrets] = [] |
|
37 |
+ agent.should_not be_valid |
|
38 |
+ agent.options[:secrets] = ["hello"] |
|
39 |
+ agent.should be_valid |
|
40 |
+ agent.options[:secrets] = ["hello", "world"] |
|
41 |
+ agent.should be_valid |
|
42 |
+ end |
|
43 |
+ |
|
44 |
+ it "should validate presence of expected_receive_period_in_days" do |
|
45 |
+ agent.options[:expected_receive_period_in_days] = "" |
|
46 |
+ agent.should_not be_valid |
|
47 |
+ agent.options[:expected_receive_period_in_days] = 0 |
|
48 |
+ agent.should_not be_valid |
|
49 |
+ agent.options[:expected_receive_period_in_days] = -1 |
|
50 |
+      agent.should_not be_valid
+    end
+
+    it "should validate presence of template and template.item" do
+      agent.options[:template] = ""
+      agent.should_not be_valid
+      agent.options[:template] = {}
+      agent.should_not be_valid
+      agent.options[:template] = { 'item' => 'foo' }
+      agent.should_not be_valid
+      agent.options[:template] = { 'item' => { 'title' => 'hi' } }
+      agent.should be_valid
+    end
+  end
+
+  describe "#receive_web_request" do
+    before do
+      current_time = Time.now
+      stub(Time).now { current_time }
+      agents(:bob_website_agent).events.destroy_all
+    end
+
+    it "requires a valid secret" do
+      content, status, content_type = agent.receive_web_request({ 'secret' => 'fake' }, 'get', 'text/xml')
+      status.should == 401
+      content.should == "Not Authorized"
+
+      content, status, content_type = agent.receive_web_request({ 'secret' => 'fake' }, 'get', 'application/json')
+      status.should == 401
+      content.should == { :error => "Not Authorized" }
+
+      content, status, content_type = agent.receive_web_request({ 'secret' => 'secret1' }, 'get', 'application/json')
+      status.should == 200
+    end
+
+    describe "returning events as RSS and JSON" do
+      let!(:event1) do
+        agents(:bob_website_agent).create_event :payload => {
+          "url" => "http://imgs.xkcd.com/comics/evolving.png",
+          "title" => "Evolving",
+          "hovertext" => "Biologists play reverse Pokemon, trying to avoid putting any one team member on the front lines long enough for the experience to cause evolution."
+        }
+      end
+
+      let!(:event2) do
+        agents(:bob_website_agent).create_event :payload => {
+          "url" => "http://imgs.xkcd.com/comics/evolving2.png",
+          "title" => "Evolving again",
+          "hovertext" => "Something else"
+        }
+      end
+
+      it "can output RSS" do
+        stub(agent).feed_link { "https://yoursite.com" }
+        content, status, content_type = agent.receive_web_request({ 'secret' => 'secret1' }, 'get', 'text/xml')
+        status.should == 200
+        content_type.should == 'text/xml'
+        content.gsub(/\s+/, '').should == Utils.unindent(<<-XML).gsub(/\s+/, '')
+          <?xml version="1.0" encoding="UTF-8" ?>
+          <rss version="2.0">
+          <channel>
+            <title>XKCD comics as a feed</title>
+            <description>This is a feed of recent XKCD comics, generated by Huginn</description>
+            <link>https://yoursite.com</link>
+            <lastBuildDate>#{Time.now.rfc2822}</lastBuildDate>
+            <pubDate>#{Time.now.rfc2822}</pubDate>
+            <ttl>60</ttl>
+
+            <item>
+              <title>Evolving again</title>
+              <description>Secret hovertext: Something else</description>
+              <link>http://imgs.xkcd.com/comics/evolving2.png</link>
+              <guid>#{event2.id}</guid>
+              <pubDate>#{event2.created_at.rfc2822}</pubDate>
+            </item>
+
+            <item>
+              <title>Evolving</title>
+              <description>Secret hovertext: Biologists play reverse Pokemon, trying to avoid putting any one team member on the front lines long enough for the experience to cause evolution.</description>
+              <link>http://imgs.xkcd.com/comics/evolving.png</link>
+              <guid>#{event1.id}</guid>
+              <pubDate>#{event1.created_at.rfc2822}</pubDate>
+            </item>
+
+          </channel>
+          </rss>
+        XML
+      end
+
+      it "can output JSON" do
+        agent.options['template']['item']['foo'] = "hi"
+
+        content, status, content_type = agent.receive_web_request({ 'secret' => 'secret2' }, 'get', 'application/json')
+        status.should == 200
+
+        content.should == {
+          'title' => 'XKCD comics as a feed',
+          'description' => 'This is a feed of recent XKCD comics, generated by Huginn',
+          'pubDate' => Time.now,
+          'items' => [
+            {
+              'title' => 'Evolving again',
+              'description' => 'Secret hovertext: Something else',
+              'link' => 'http://imgs.xkcd.com/comics/evolving2.png',
+              'guid' => event2.id,
+              'pubDate' => event2.created_at.rfc2822,
+              'foo' => 'hi'
+            },
+            {
+              'title' => 'Evolving',
+              'description' => 'Secret hovertext: Biologists play reverse Pokemon, trying to avoid putting any one team member on the front lines long enough for the experience to cause evolution.',
+              'link' => 'http://imgs.xkcd.com/comics/evolving.png',
+              'guid' => event1.id,
+              'pubDate' => event1.created_at.rfc2822,
+              'foo' => 'hi'
+            }
+          ]
+        }
+      end
+    end
+  end
+end
@@ -7,9 +7,16 @@ describe Agents::EventFormattingAgent do
       :options => {
         :instructions => {
           :message => "Received <$.content.text.*> from <$.content.name> .",
-          :subject => "Weather looks like <$.conditions>"
+          :subject => "Weather looks like <$.conditions> according to the forecast at <$.pretty_date.time>"
         },
         :mode => "clean",
+        :matchers => [
+          {
+            :path => "$.date.pretty",
+            :regexp => "\\A(?<time>\\d\\d:\\d\\d [AP]M [A-Z]+)",
+            :to => "pretty_date",
+          },
+        ],
         :skip_agent => "false",
         :skip_created_at => "false"
       }
@@ -24,7 +31,11 @@ describe Agents::EventFormattingAgent do
     @event.payload = {
       :content => {
         :text => "Some Lorem Ipsum",
-        :name => "somevalue"
+        :name => "somevalue",
+      },
+      :date => {
+        :epoch => "1357959600",
+        :pretty => "10:00 PM EST on January 11, 2013"
       },
       :conditions => "someothervalue"
     }
@@ -61,7 +72,11 @@ describe Agents::EventFormattingAgent do
     it "should handle JSONPaths in instructions" do
       @checker.receive([@event])
       Event.last.payload[:message].should == "Received Some Lorem Ipsum from somevalue ."
-      Event.last.payload[:subject].should == "Weather looks like someothervalue"
+    end
+
+    it "should handle matchers and JSONPaths in instructions" do
+      @checker.receive([@event])
+      Event.last.payload[:subject].should == "Weather looks like someothervalue according to the forecast at 10:00 PM EST"
     end

     it "should allow escaping" do
@@ -110,6 +125,28 @@ describe Agents::EventFormattingAgent do
       @checker.should_not be_valid
     end

+    it "should validate type of matchers" do
+      @checker.options[:matchers] = ""
+      @checker.should_not be_valid
+      @checker.options[:matchers] = {}
+      @checker.should_not be_valid
+    end
+
+    it "should validate the contents of matchers" do
+      @checker.options[:matchers] = [
+        {}
+      ]
+      @checker.should_not be_valid
+      @checker.options[:matchers] = [
+        { :regexp => "(not closed", :path => "text" }
+      ]
+      @checker.should_not be_valid
+      @checker.options[:matchers] = [
+        { :regexp => "(closed)", :path => "text", :to => "foo" }
+      ]
+      @checker.should be_valid
+    end
+
     it "should validate presence of mode" do
       @checker.options[:mode] = ""
       @checker.should_not be_valid
@@ -125,4 +162,4 @@ describe Agents::EventFormattingAgent do
       @checker.should_not be_valid
     end
   end
-end
+end
@@ -0,0 +1,79 @@
+require 'spec_helper'
+require 'time'
+
+describe Agents::FtpsiteAgent do
+  describe "checking anonymous FTP" do
+    before do
+      @site = {
+        'expected_update_period_in_days' => 1,
+        'url' => "ftp://ftp.example.org/pub/releases/",
+        'patterns' => ["example-*.tar.gz"],
+      }
+      @checker = Agents::FtpsiteAgent.new(:name => "Example", :options => @site, :keep_events_for => 2)
+      @checker.user = users(:bob)
+      @checker.save!
+      stub(@checker).each_entry.returns { |block|
+        block.call("example-latest.tar.gz", Time.parse("2014-04-01T10:00:01Z"))
+        block.call("example-1.0.tar.gz", Time.parse("2013-10-01T10:00:00Z"))
+        block.call("example-1.1.tar.gz", Time.parse("2014-04-01T10:00:00Z"))
+      }
+    end
+
+    describe "#check" do
+      it "should validate the integer fields" do
+        @checker.options['expected_update_period_in_days'] = "nonsense"
+        lambda { @checker.save! }.should raise_error;
+        @checker.options = @site
+      end
+
+      it "should check for changes and save known entries in memory" do
+        lambda { @checker.check }.should change { Event.count }.by(3)
+        @checker.memory['known_entries'].tap { |known_entries|
+          known_entries.size.should == 3
+          known_entries.sort_by(&:last).should == [
+            ["example-1.0.tar.gz", "2013-10-01T10:00:00Z"],
+            ["example-1.1.tar.gz", "2014-04-01T10:00:00Z"],
+            ["example-latest.tar.gz", "2014-04-01T10:00:01Z"],
+          ]
+        }
+
+        Event.last(2).first.payload.should == {
+          'url' => 'ftp://ftp.example.org/pub/releases/example-1.1.tar.gz',
+          'filename' => 'example-1.1.tar.gz',
+          'timestamp' => '2014-04-01T10:00:00Z',
+        }
+
+        lambda { @checker.check }.should_not change { Event.count }
+
+        stub(@checker).each_entry.returns { |block|
+          block.call("example-latest.tar.gz", Time.parse("2014-04-02T10:00:01Z"))
+
+          # In the long list format the timestamp may look going
+          # backwards after six months: Oct 01 10:00 -> Oct 01 2013
+          block.call("example-1.0.tar.gz", Time.parse("2013-10-01T00:00:00Z"))
+
+          block.call("example-1.1.tar.gz", Time.parse("2014-04-01T10:00:00Z"))
+          block.call("example-1.2.tar.gz", Time.parse("2014-04-02T10:00:00Z"))
+        }
+        lambda { @checker.check }.should change { Event.count }.by(2)
+        @checker.memory['known_entries'].tap { |known_entries|
+          known_entries.size.should == 4
+          known_entries.sort_by(&:last).should == [
+            ["example-1.0.tar.gz", "2013-10-01T00:00:00Z"],
+            ["example-1.1.tar.gz", "2014-04-01T10:00:00Z"],
+            ["example-1.2.tar.gz", "2014-04-02T10:00:00Z"],
+            ["example-latest.tar.gz", "2014-04-02T10:00:01Z"],
+          ]
+        }
+
+        Event.last(2).first.payload.should == {
+          'url' => 'ftp://ftp.example.org/pub/releases/example-1.2.tar.gz',
+          'filename' => 'example-1.2.tar.gz',
+          'timestamp' => '2014-04-02T10:00:00Z',
+        }
+
+        lambda { @checker.check }.should_not change { Event.count }
+      end
+    end
+  end
+end
@@ -0,0 +1,103 @@
+require 'spec_helper'
+
+describe Agents::HipchatAgent do
+  before(:each) do
+    @valid_params = {
+      'auth_token' => 'token',
+      'room_name' => 'test',
+      'room_name_path' => '',
+      'username' => "Huginn",
+      'username_path' => '$.username',
+      'message' => "Hello from Huginn!",
+      'message_path' => '$.message',
+      'notify' => false,
+      'notify_path' => '',
+      'color' => 'yellow',
+      'color_path' => '',
+    }
+
+    @checker = Agents::HipchatAgent.new(:name => "somename", :options => @valid_params)
+    @checker.user = users(:jane)
+    @checker.save!
+
+    @event = Event.new
+    @event.agent = agents(:bob_weather_agent)
+    @event.payload = { :room_name => 'test room', :message => 'Looks like its going to rain', username: "Huggin user"}
+    @event.save!
+  end
+
+  describe "validating" do
+    before do
+      @checker.should be_valid
+    end
+
+ it "should require the basecamp username" do |
|
35 |
+ @checker.options['auth_token'] = nil |
|
36 |
+ @checker.should_not be_valid |
|
37 |
+ end |
|
38 |
+ |
|
39 |
+ it "should require the basecamp password" do |
|
40 |
+ @checker.options['room_name'] = nil |
|
41 |
+ @checker.should_not be_valid |
|
42 |
+ end |
|
43 |
+ |
|
44 |
+ it "should require the basecamp user_id" do |
|
45 |
+ @checker.options['room_name'] = nil |
|
46 |
+ @checker.options['room_name_path'] = 'jsonpath' |
|
47 |
+ @checker.should be_valid |
|
48 |
+ end |
|
49 |
+ |
|
50 |
+ end |
|
51 |
+
+  describe "helpers" do
+    describe "select_option" do
+      it "should use the room_name_path if specified" do
+        @checker.options['room_name_path'] = "$.room_name"
+        @checker.send(:select_option, @event, :room_name).should == "test room"
+      end
+
+      it "should use the normal option when the path option is blank" do
+        @checker.send(:select_option, @event, :room_name).should == "test"
+      end
+    end
+
+    it "should merge all options" do
+      @checker.send(:merge_options, @event).should == {
+        :room_name => "test",
+        :username => "Huggin user",
+        :message => "Looks like its going to rain",
+        :notify => false,
+        :color => "yellow"
+      }
+    end
+  end
+
+  describe "#receive" do
+    it "send a message to the hipchat" do
+      any_instance_of(HipChat::Room) do |obj|
+        mock(obj).send(@event.payload[:username], @event.payload[:message], {:notify => 0, :color => 'yellow'})
+      end
+      @checker.receive([@event])
+    end
+  end
+
+  describe "#working?" do
+    it "should not be working until the first event was received" do
+      @checker.should_not be_working
+      @checker.last_receive_at = Time.now
+      @checker.should be_working
+    end
+
+ it "should not be working when the last error occured after the last received event" do |
|
92 |
+ @checker.last_receive_at = Time.now - 1.minute |
|
93 |
+ @checker.last_error_log_at = Time.now |
|
94 |
+ @checker.should_not be_working |
|
95 |
+ end |
|
96 |
+ |
|
97 |
+ it "should be working when the last received event occured after the last error" do |
|
98 |
+ @checker.last_receive_at = Time.now |
|
99 |
+ @checker.last_error_log_at = Time.now - 1.minute |
|
100 |
+ @checker.should be_working |
|
101 |
+ end |
|
102 |
+ end |
|
103 |
+end |
@@ -5,8 +5,11 @@ describe Agents::PostAgent do
     @valid_params = {
       :name => "somename",
       :options => {
-        :post_url => "http://www.example.com",
-        :expected_receive_period_in_days => 1
+        'post_url' => "http://www.example.com",
+        'expected_receive_period_in_days' => 1,
+        'payload' => {
+          'default' => 'value'
+        }
       }
     }

@@ -17,28 +20,69 @@ describe Agents::PostAgent do
     @event = Event.new
     @event.agent = agents(:jane_weather_agent)
     @event.payload = {
-      :somekey => "somevalue",
-      :someotherkey => {
-        :somekey => "value"
+      'somekey' => 'somevalue',
+      'someotherkey' => {
+        'somekey' => 'value'
       }
     }

-    @sent_messages = []
-    stub.any_instance_of(Agents::PostAgent).post_event { |uri, event| @sent_messages << event }
+    @sent_posts = []
+    @sent_gets = []
+    stub.any_instance_of(Agents::PostAgent).post_data { |data| @sent_posts << data }
+    stub.any_instance_of(Agents::PostAgent).get_data { |data| @sent_gets << data }
   end

   describe "#receive" do
-    it "checks if it can handle multiple events" do
+    it "can handle multiple events and merge the payloads with options['payload']" do
       event1 = Event.new
       event1.agent = agents(:bob_weather_agent)
       event1.payload = {
-        :xyz => "value1",
-        :message => "value2"
+        'xyz' => 'value1',
+        'message' => 'value2',
+        'default' => 'value2'
       }

       lambda {
-        @checker.receive([@event, event1])
-      }.should change { @sent_messages.length }.by(2)
+        lambda {
+          @checker.receive([@event, event1])
+        }.should change { @sent_posts.length }.by(2)
+      }.should_not change { @sent_gets.length }
+
+      @sent_posts[0].should == @event.payload.merge('default' => 'value')
+      @sent_posts[1].should == event1.payload
+    end
+
+    it "can make GET requests" do
+      @checker.options['method'] = 'get'
+
+      lambda {
+        lambda {
+          @checker.receive([@event])
+        }.should change { @sent_gets.length }.by(1)
+      }.should_not change { @sent_posts.length }
+
+      @sent_gets[0].should == @event.payload.merge('default' => 'value')
+    end
+  end
+
+  describe "#check" do
+    it "sends options['payload'] as a POST request" do
+      lambda {
+        @checker.check
+      }.should change { @sent_posts.length }.by(1)
+
+      @sent_posts[0].should == @checker.options['payload']
+    end
+
+    it "sends options['payload'] as a GET request" do
+      @checker.options['method'] = 'get'
+      lambda {
+        lambda {
+          @checker.check
+        }.should change { @sent_gets.length }.by(1)
+      }.should_not change { @sent_posts.length }
+
+      @sent_gets[0].should == @checker.options['payload']
     end
   end

@@ -59,13 +103,82 @@ describe Agents::PostAgent do
     end

     it "should validate presence of post_url" do
-      @checker.options[:post_url] = ""
+      @checker.options['post_url'] = ""
       @checker.should_not be_valid
     end

     it "should validate presence of expected_receive_period_in_days" do
-      @checker.options[:expected_receive_period_in_days] = ""
+      @checker.options['expected_receive_period_in_days'] = ""
       @checker.should_not be_valid
     end
+
+    it "should validate method as post or get, defaulting to post" do
+      @checker.options['method'] = ""
+      @checker.method.should == "post"
+      @checker.should be_valid
+
+      @checker.options['method'] = "POST"
+      @checker.method.should == "post"
+      @checker.should be_valid
+
+      @checker.options['method'] = "get"
+      @checker.method.should == "get"
+      @checker.should be_valid
+
+      @checker.options['method'] = "wut"
+      @checker.method.should == "wut"
+      @checker.should_not be_valid
+    end
+
+    it "should validate payload as a hash, if present" do
+      @checker.options['payload'] = ""
+      @checker.should be_valid
+
+      @checker.options['payload'] = "hello"
+      @checker.should_not be_valid
+
+      @checker.options['payload'] = ["foo", "bar"]
+      @checker.should_not be_valid
+
+      @checker.options['payload'] = { 'this' => 'that' }
+      @checker.should be_valid
+    end
+
+    it "requires headers to be a hash, if present" do
+      @checker.options['headers'] = [1,2,3]
+      @checker.should_not be_valid
+
+      @checker.options['headers'] = "hello world"
+      @checker.should_not be_valid
+
+      @checker.options['headers'] = ""
+      @checker.should be_valid
+
+      @checker.options['headers'] = {}
+      @checker.should be_valid
+
+      @checker.options['headers'] = { "Authorization" => "foo bar" }
+      @checker.should be_valid
+    end
+  end
+
+  describe "#generate_uri" do
+    it "merges params with any in the post_url" do
+      @checker.options['post_url'] = "http://example.com/a/path?existing_param=existing_value"
+      uri = @checker.generate_uri("some_param" => "some_value", "another_param" => "another_value")
+      uri.request_uri.should == "/a/path?existing_param=existing_value&some_param=some_value&another_param=another_value"
+    end
+
+    it "works fine with urls that do not have a query" do
+      @checker.options['post_url'] = "http://example.com/a/path"
+      uri = @checker.generate_uri("some_param" => "some_value", "another_param" => "another_value")
+      uri.request_uri.should == "/a/path?some_param=some_value&another_param=another_value"
+    end
+
+    it "just returns the post_uri when no params are given" do
+      @checker.options['post_url'] = "http://example.com/a/path?existing_param=existing_value"
+      uri = @checker.generate_uri
+      uri.request_uri.should == "/a/path?existing_param=existing_value"
+    end
   end
 end
@@ -0,0 +1,99 @@
+require 'spec_helper'
+
+describe Agents::ShellCommandAgent do
+  before do
+    @valid_path = Dir.pwd
+
+    @valid_params = {
+      :path => @valid_path,
+      :command => "pwd",
+      :expected_update_period_in_days => "1",
+    }
+
+    @checker = Agents::ShellCommandAgent.new(:name => "somename", :options => @valid_params)
+    @checker.user = users(:jane)
+    @checker.save!
+
+    @event = Event.new
+    @event.agent = agents(:jane_weather_agent)
+    @event.payload = {
+      :command => "ls"
+    }
+    @event.save!
+
+    stub(Agents::ShellCommandAgent).should_run? { true }
+  end
+
+  describe "validation" do
+    before do
+      @checker.should be_valid
+    end
+
+    it "should validate presence of necessary fields" do
+      @checker.options[:command] = nil
+      @checker.should_not be_valid
+    end
+
+    it "should validate path" do
+      @checker.options[:path] = 'notarealpath/itreallyisnt'
+      @checker.should_not be_valid
+    end
+
+    it "should validate path" do
+      @checker.options[:path] = '/'
+      @checker.should be_valid
+    end
+  end
+
+  describe "#working?" do
+    it "generating events as scheduled" do
+      stub(@checker).run_command(@valid_path, 'pwd') { ["fake pwd output", "", 0] }
+
+      @checker.should_not be_working
+      @checker.check
+      @checker.reload.should be_working
+      three_days_from_now = 3.days.from_now
+      stub(Time).now { three_days_from_now }
+      @checker.should_not be_working
+    end
+  end
+
+  describe "#check" do
+    before do
+      stub(@checker).run_command(@valid_path, 'pwd') { ["fake pwd output", "", 0] }
+    end
+
+    it "should create an event when checking" do
+      expect { @checker.check }.to change { Event.count }.by(1)
+      Event.last.payload[:path].should == @valid_path
+      Event.last.payload[:command].should == 'pwd'
+      Event.last.payload[:output].should == "fake pwd output"
+    end
+
+    it "does not run when should_run? is false" do
+      stub(Agents::ShellCommandAgent).should_run? { false }
+      expect { @checker.check }.not_to change { Event.count }
+    end
+  end
+
+  describe "#receive" do
+    before do
+      stub(@checker).run_command(@valid_path, @event.payload[:command]) { ["fake ls output", "", 0] }
+    end
+
+    it "creates events" do
+      @checker.receive([@event])
+      Event.last.payload[:path].should == @valid_path
+      Event.last.payload[:command].should == @event.payload[:command]
+      Event.last.payload[:output].should == "fake ls output"
+    end
+
+    it "does not run when should_run? is false" do
+      stub(Agents::ShellCommandAgent).should_run? { false }
+
+      expect {
+        @checker.receive([@event])
+      }.not_to change { Event.count }
+    end
+  end
+end
@@ -71,6 +71,28 @@ describe Agents::TriggerAgent do
       }.should change { Event.count }.by(1)
     end

+    it "handles array of regex" do
+      @event.payload['foo']['bar']['baz'] = "a222b"
+      @checker.options['rules'][0] = {
+        'type' => "regex",
+        'value' => ["a\\db", "a\\Wb"],
+        'path' => "foo.bar.baz",
+      }
+      lambda {
+        @checker.receive([@event])
+      }.should_not change { Event.count }
+
+      @event.payload['foo']['bar']['baz'] = "a2b"
+      lambda {
+        @checker.receive([@event])
+      }.should change { Event.count }.by(1)
+
+      @event.payload['foo']['bar']['baz'] = "a b"
+      lambda {
+        @checker.receive([@event])
+      }.should change { Event.count }.by(1)
+    end
+
     it "handles negated regex" do
       @event.payload['foo']['bar']['baz'] = "a2b"
       @checker.options['rules'][0] = {
@@ -89,6 +111,24 @@ describe Agents::TriggerAgent do
       }.should change { Event.count }.by(1)
     end

+    it "handles array of negated regex" do
+      @event.payload['foo']['bar']['baz'] = "a2b"
+      @checker.options['rules'][0] = {
+        'type' => "!regex",
+        'value' => ["a\\db", "a2b"],
+        'path' => "foo.bar.baz",
+      }
+
+      lambda {
+        @checker.receive([@event])
+      }.should_not change { Event.count }
+
+      @event.payload['foo']['bar']['baz'] = "a3b"
+      lambda {
+        @checker.receive([@event])
+      }.should change { Event.count }.by(1)
+    end
+
     it "puts can extract values into the message based on paths" do
       @checker.receive([@event])
       Event.last.payload['message'].should == "I saw 'a2b' from Joe"
@@ -109,6 +149,21 @@ describe Agents::TriggerAgent do
       }.should_not change { Event.count }
     end

+    it "handles array of numerical comparisons" do
+      @event.payload['foo']['bar']['baz'] = "5"
+      @checker.options['rules'].first['value'] = [6, 3]
+      @checker.options['rules'].first['type'] = "field<value"
+
+      lambda {
+        @checker.receive([@event])
+      }.should change { Event.count }.by(1)
+
+      @checker.options['rules'].first['value'] = [4, 3]
+      lambda {
+        @checker.receive([@event])
+      }.should_not change { Event.count }
+    end
+
     it "handles exact comparisons" do
       @event.payload['foo']['bar']['baz'] = "hello world"
       @checker.options['rules'].first['type'] = "field==value"
@@ -124,6 +179,21 @@ describe Agents::TriggerAgent do
       }.should change { Event.count }.by(1)
     end

+    it "handles array of exact comparisons" do
+      @event.payload['foo']['bar']['baz'] = "hello world"
+      @checker.options['rules'].first['type'] = "field==value"
+
+      @checker.options['rules'].first['value'] = ["hello there", "hello universe"]
+      lambda {
+        @checker.receive([@event])
+      }.should_not change { Event.count }
+
+      @checker.options['rules'].first['value'] = ["hello world", "hello universe"]
+      lambda {
+        @checker.receive([@event])
+      }.should change { Event.count }.by(1)
+    end
+
     it "handles negated comparisons" do
       @event.payload['foo']['bar']['baz'] = "hello world"
       @checker.options['rules'].first['type'] = "field!=value"
@@ -140,6 +210,22 @@ describe Agents::TriggerAgent do
       }.should change { Event.count }.by(1)
     end

+    it "handles array of negated comparisons" do
+      @event.payload['foo']['bar']['baz'] = "hello world"
+      @checker.options['rules'].first['type'] = "field!=value"
+      @checker.options['rules'].first['value'] = ["hello world", "hello world"]
+
+      lambda {
+        @checker.receive([@event])
+      }.should_not change { Event.count }
+
+      @checker.options['rules'].first['value'] = ["hello there", "hello world"]
+
+      lambda {
+        @checker.receive([@event])
+      }.should change { Event.count }.by(1)
+    end
+
     it "does fine without dots in the path" do
       @event.payload = { 'hello' => "world" }
       @checker.options['rules'].first['type'] = "field==value"
@@ -22,7 +22,10 @@ describe Agents::TwitterPublishAgent do
     @event.save!

     @sent_messages = []
-    stub.any_instance_of(Agents::TwitterPublishAgent).publish_tweet { |message| @sent_messages << message}
+    stub.any_instance_of(Agents::TwitterPublishAgent).publish_tweet { |message|
+      @sent_messages << message
+      OpenStruct.new(:id => 454209588376502272)
+    }
   end

   describe '#receive' do
@@ -53,4 +56,4 @@ describe Agents::TwitterPublishAgent do
       @checker.reload.should_not be_working # More time has passed than the expected receive period without any new events
     end
   end
-end
+end
@@ -10,11 +10,11 @@ describe Agents::WebhookAgent do
   end
   let(:payload) { {'some' => 'info'} }

-  describe 'receive_webhook' do
+  describe 'receive_web_request' do
     it 'should create event if secret matches' do
       out = nil
       lambda {
-        out = agent.receive_webhook('secret' => 'foobar', 'payload' => payload)
+        out = agent.receive_web_request({ 'secret' => 'foobar', 'payload' => payload }, "post", "text/html")
       }.should change { Event.count }.by(1)
       out.should eq(['Event Created', 201])
       Event.last.payload.should eq(payload)
@@ -23,9 +23,17 @@ describe Agents::WebhookAgent do
     it 'should not create event if secrets dont match' do
       out = nil
       lambda {
-        out = agent.receive_webhook('secret' => 'bazbat', 'payload' => payload)
+        out = agent.receive_web_request({ 'secret' => 'bazbat', 'payload' => payload }, "post", "text/html")
       }.should change { Event.count }.by(0)
       out.should eq(['Not Authorized', 401])
     end
+
+    it "should only accept POSTs" do
+      out = nil
+      lambda {
+        out = agent.receive_web_request({ 'secret' => 'foobar', 'payload' => payload }, "get", "text/html")
+      }.should change { Event.count }.by(0)
+      out.should eq(['Please use POST requests only', 401])
+    end
   end
 end
@@ -11,8 +11,9 @@ describe Agents::WebsiteAgent do
       'url' => "http://xkcd.com",
       'mode' => 'on_change',
       'extract' => {
-        'url' => {'css' => "#comic img", 'attr' => "src"},
-        'title' => {'css' => "#comic img", 'attr' => "title"}
+        'url' => { 'css' => "#comic img", 'attr' => "src" },
+        'title' => { 'css' => "#comic img", 'attr' => "alt" },
+        'hovertext' => { 'css' => "#comic img", 'attr' => "title" }
       }
     }
     @checker = Agents::WebsiteAgent.new(:name => "xkcd", :options => @site, :keep_events_for => 2)
@@ -21,7 +22,6 @@ describe Agents::WebsiteAgent do
   end

   describe "#check" do
-
     it "should validate the integer fields" do
       @checker.options['expected_update_period_in_days'] = "nonsense"
       lambda { @checker.save! }.should raise_error;
@@ -32,7 +32,17 @@ describe Agents::WebsiteAgent do
       lambda { @checker.save! }.should raise_error;
       @checker.options = @site
     end
-
+
+    it "should validate the force_encoding option" do
+      @checker.options['force_encoding'] = 'UTF-8'
+      lambda { @checker.save! }.should_not raise_error;
+      @checker.options['force_encoding'] = ['UTF-8']
+      lambda { @checker.save! }.should raise_error;
+      @checker.options['force_encoding'] = 'UTF-42'
+      lambda { @checker.save! }.should raise_error;
+      @checker.options = @site
+    end
+
     it "should check for changes (and update Event.expires_at)" do
       lambda { @checker.check }.should change { Event.count }.by(1)
       event = Event.last
@@ -81,6 +91,86 @@ describe Agents::WebsiteAgent do
       @checker.check
       @checker.logs.first.message.should =~ /Got an uneven number of matches/
     end
+
+    it "should accept an array for url" do
+      @site['url'] = ["http://xkcd.com/1/", "http://xkcd.com/2/"]
+      @checker.options = @site
+      lambda { @checker.save! }.should_not raise_error;
+      lambda { @checker.check }.should_not raise_error;
+    end
+
+    it "should parse events from all urls in array" do
+      lambda {
+        @site['url'] = ["http://xkcd.com/", "http://xkcd.com/"]
+        @site['mode'] = 'all'
+        @checker.options = @site
+        @checker.check
+      }.should change { Event.count }.by(2)
+    end
+
+    it "should follow unique rules when parsing array of urls" do
+      lambda {
+        @site['url'] = ["http://xkcd.com/", "http://xkcd.com/"]
+        @checker.options = @site
+        @checker.check
+      }.should change { Event.count }.by(1)
+    end
+  end
+
+  describe 'encoding' do
+    it 'should be forced with force_encoding option' do
+      huginn = "\u{601d}\u{8003}"
+      stub_request(:any, /no-encoding/).to_return(:body => {
+        :value => huginn,
+      }.to_json.encode(Encoding::EUC_JP), :headers => {
+        'Content-Type' => 'application/json',
+      }, :status => 200)
+      site = {
+        'name' => "Some JSON Response",
+        'expected_update_period_in_days' => 2,
+        'type' => "json",
+        'url' => "http://no-encoding.example.com",
+        'mode' => 'on_change',
+        'extract' => {
+          'value' => { 'path' => 'value' },
+        },
+        'force_encoding' => 'EUC-JP',
+      }
+      checker = Agents::WebsiteAgent.new(:name => "No Encoding Site", :options => site)
+      checker.user = users(:bob)
+      checker.save!
+
+      checker.check
+      event = Event.last
+      event.payload['value'].should == huginn
+    end
+
+    it 'should be overridden with force_encoding option' do
+      huginn = "\u{601d}\u{8003}"
+      stub_request(:any, /wrong-encoding/).to_return(:body => {
+        :value => huginn,
+      }.to_json.encode(Encoding::EUC_JP), :headers => {
+        'Content-Type' => 'application/json; UTF-8',
+      }, :status => 200)
+      site = {
+        'name' => "Some JSON Response",
+        'expected_update_period_in_days' => 2,
+        'type' => "json",
+        'url' => "http://wrong-encoding.example.com",
+        'mode' => 'on_change',
+        'extract' => {
+          'value' => { 'path' => 'value' },
+        },
+        'force_encoding' => 'EUC-JP',
+      }
+      checker = Agents::WebsiteAgent.new(:name => "Wrong Encoding Site", :options => site)
+      checker.user = users(:bob)
+      checker.save!
+
+      checker.check
+      event = Event.last
+      event.payload['value'].should == huginn
+    end
   end

   describe '#working?' do
@@ -110,7 +200,21 @@ describe Agents::WebsiteAgent do
       @checker.check
       event = Event.last
       event.payload['url'].should == "http://imgs.xkcd.com/comics/evolving.png"
-      event.payload['title'].should =~ /^Biologists play reverse/
+      event.payload['title'].should == "Evolving"
+      event.payload['hovertext'].should =~ /^Biologists play reverse/
+    end
+
+    it "parses XPath" do
+      @site['extract'].each { |key, value|
+        value.delete('css')
+        value['xpath'] = "//*[@id='comic']//img"
+      }
+      @checker.options = @site
+      @checker.check
+      event = Event.last
+      event.payload['url'].should == "http://imgs.xkcd.com/comics/evolving.png"
+      event.payload['title'].should == "Evolving"
+      event.payload['hovertext'].should =~ /^Biologists play reverse/
     end

     it "should turn relative urls to absolute" do
@@ -239,8 +343,9 @@ describe Agents::WebsiteAgent do
         'url' => "http://www.example.com",
         'mode' => 'on_change',
         'extract' => {
-          'url' => {'css' => "#comic img", 'attr' => "src"},
-          'title' => {'css' => "#comic img", 'attr' => "title"}
+          'url' => { 'css' => "#comic img", 'attr' => "src" },
+          'title' => { 'css' => "#comic img", 'attr' => "alt" },
+          'hovertext' => { 'css' => "#comic img", 'attr' => "title" }
         },
         'basic_auth' => "user:pass"
       }
@@ -256,4 +361,4 @@ describe Agents::WebsiteAgent do
       end
     end
   end
-end
+end
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe "routing for web requests" do
+  it "routes to handle_request" do
+    resulting_params = { :user_id => "6", :agent_id => "2", :secret => "foobar" }
+    get("/users/6/web_requests/2/foobar").should route_to("web_requests#handle_request", resulting_params)
+    post("/users/6/web_requests/2/foobar").should route_to("web_requests#handle_request", resulting_params)
+    put("/users/6/web_requests/2/foobar").should route_to("web_requests#handle_request", resulting_params)
+    delete("/users/6/web_requests/2/foobar").should route_to("web_requests#handle_request", resulting_params)
+  end
+
+  it "supports the legacy /webhooks/ route" do
+    post("/users/6/webhooks/2/foobar").should route_to("web_requests#handle_request", :user_id => "6", :agent_id => "2", :secret => "foobar")
+  end
+
+  it "routes with format" do
+    get("/users/6/web_requests/2/foobar.json").should route_to("web_requests#handle_request",
+      { :user_id => "6", :agent_id => "2", :secret => "foobar", :format => "json" })
+
+    get("/users/6/web_requests/2/foobar.atom").should route_to("web_requests#handle_request",
+      { :user_id => "6", :agent_id => "2", :secret => "foobar", :format => "atom" })
+  end
+end